diff --git a/.tox/.package.lock b/.tox/.package.lock deleted file mode 100755 index e69de29..0000000 diff --git a/.tox/log/.lock b/.tox/log/.lock deleted file mode 100755 index e69de29..0000000 diff --git a/.tox/log/GLOB-0.log b/.tox/log/GLOB-0.log deleted file mode 100644 index 16f01a7..0000000 --- a/.tox/log/GLOB-0.log +++ /dev/null @@ -1,54 +0,0 @@ -action: GLOB, msg: packaging -cwd: /home/amnesia/dev/mega.py -cmd: /home/amnesia/.pyenv/versions/3.7.4/envs/megapy/bin/python3.7 setup.py sdist --formats=zip --dist-dir /home/amnesia/dev/mega.py/.tox/dist -running sdist -running egg_info -writing src/mega.py.egg-info/PKG-INFO -writing dependency_links to src/mega.py.egg-info/dependency_links.txt -writing requirements to src/mega.py.egg-info/requires.txt -writing top-level names to src/mega.py.egg-info/top_level.txt -reading manifest file 'src/mega.py.egg-info/SOURCES.txt' -writing manifest file 'src/mega.py.egg-info/SOURCES.txt' -running check -warning: check: missing required meta-data: url - -creating mega.py-0.9.17 -creating mega.py-0.9.17/src -creating mega.py-0.9.17/src/mega -creating mega.py-0.9.17/src/mega.py.egg-info -copying files to mega.py-0.9.17... -copying README.rst -> mega.py-0.9.17 -copying setup.cfg -> mega.py-0.9.17 -copying setup.py -> mega.py-0.9.17 -copying src/mega/__init__.py -> mega.py-0.9.17/src/mega -copying src/mega/crypto.py -> mega.py-0.9.17/src/mega -copying src/mega/errors.py -> mega.py-0.9.17/src/mega -copying src/mega/mega.py -> mega.py-0.9.17/src/mega -copying src/mega.py.egg-info/PKG-INFO -> mega.py-0.9.17/src/mega.py.egg-info -copying src/mega.py.egg-info/SOURCES.txt -> mega.py-0.9.17/src/mega.py.egg-info -copying src/mega.py.egg-info/dependency_links.txt -> mega.py-0.9.17/src/mega.py.egg-info -copying src/mega.py.egg-info/not-zip-safe -> mega.py-0.9.17/src/mega.py.egg-info -copying src/mega.py.egg-info/requires.txt -> mega.py-0.9.17/src/mega.py.egg-info -copying src/mega.py.egg-info/top_level.txt -> mega.py-0.9.17/src/mega.py.egg-info -Writing mega.py-0.9.17/setup.cfg -creating /home/amnesia/dev/mega.py/.tox/dist -creating '/home/amnesia/dev/mega.py/.tox/dist/mega.py-0.9.17.zip' and adding 'mega.py-0.9.17' to it -adding 'mega.py-0.9.17' -adding 'mega.py-0.9.17/src' -adding 'mega.py-0.9.17/README.rst' -adding 'mega.py-0.9.17/setup.py' -adding 'mega.py-0.9.17/PKG-INFO' -adding 'mega.py-0.9.17/setup.cfg' -adding 'mega.py-0.9.17/src/mega.py.egg-info' -adding 'mega.py-0.9.17/src/mega' -adding 'mega.py-0.9.17/src/mega.py.egg-info/PKG-INFO' -adding 'mega.py-0.9.17/src/mega.py.egg-info/top_level.txt' -adding 'mega.py-0.9.17/src/mega.py.egg-info/dependency_links.txt' -adding 'mega.py-0.9.17/src/mega.py.egg-info/not-zip-safe' -adding 'mega.py-0.9.17/src/mega.py.egg-info/requires.txt' -adding 'mega.py-0.9.17/src/mega.py.egg-info/SOURCES.txt' -adding 'mega.py-0.9.17/src/mega/errors.py' -adding 'mega.py-0.9.17/src/mega/mega.py' -adding 'mega.py-0.9.17/src/mega/crypto.py' -adding 'mega.py-0.9.17/src/mega/__init__.py' -removing 'mega.py-0.9.17' (and everything under it) diff --git a/.tox/py37-normal/bin/activate b/.tox/py37-normal/bin/activate deleted file mode 100644 index 5f1a0f3..0000000 --- a/.tox/py37-normal/bin/activate +++ /dev/null @@ -1,84 +0,0 @@ -# This file must be used with "source bin/activate" *from bash* -# you cannot run it directly - - -if [ "${BASH_SOURCE-}" = "$0" ]; then - echo "You must source this script: \$ source $0" >&2 - exit 33 -fi - -deactivate () { - unset -f pydoc >/dev/null 2>&1 - - # reset old environment variables - # ! 
[ -z ${VAR+_} ] returns true if VAR is declared at all - if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then - PATH="$_OLD_VIRTUAL_PATH" - export PATH - unset _OLD_VIRTUAL_PATH - fi - if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then - PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME" - export PYTHONHOME - unset _OLD_VIRTUAL_PYTHONHOME - fi - - # This should detect bash and zsh, which have a hash command that must - # be called to get it to forget past commands. Without forgetting - # past commands the $PATH changes we made may not be respected - if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then - hash -r 2>/dev/null - fi - - if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then - PS1="$_OLD_VIRTUAL_PS1" - export PS1 - unset _OLD_VIRTUAL_PS1 - fi - - unset VIRTUAL_ENV - if [ ! "${1-}" = "nondestructive" ] ; then - # Self destruct! - unset -f deactivate - fi -} - -# unset irrelevant variables -deactivate nondestructive - -VIRTUAL_ENV="/home/amnesia/dev/mega.py/.tox/py37-normal" -export VIRTUAL_ENV - -_OLD_VIRTUAL_PATH="$PATH" -PATH="$VIRTUAL_ENV/bin:$PATH" -export PATH - -# unset PYTHONHOME if set -if ! [ -z "${PYTHONHOME+_}" ] ; then - _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME" - unset PYTHONHOME -fi - -if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then - _OLD_VIRTUAL_PS1="${PS1-}" - if [ "x" != x ] ; then - PS1="${PS1-}" - else - PS1="(`basename \"$VIRTUAL_ENV\"`) ${PS1-}" - fi - export PS1 -fi - -# Make sure to unalias pydoc if it's already there -alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true - -pydoc () { - python -m pydoc "$@" -} - -# This should detect bash and zsh, which have a hash command that must -# be called to get it to forget past commands. Without forgetting -# past commands the $PATH changes we made may not be respected -if [ -n "${BASH-}" ] || [ -n "${ZSH_VERSION-}" ] ; then - hash -r 2>/dev/null -fi diff --git a/.tox/py37-normal/bin/activate.csh b/.tox/py37-normal/bin/activate.csh deleted file mode 100644 index 0f1fd38..0000000 --- a/.tox/py37-normal/bin/activate.csh +++ /dev/null @@ -1,55 +0,0 @@ -# This file must be used with "source bin/activate.csh" *from csh*. -# You cannot run it directly. -# Created by Davide Di Blasi . - -set newline='\ -' - -alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH:q" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT:q" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate && unalias pydoc' - -# Unset irrelevant variables. -deactivate nondestructive - -setenv VIRTUAL_ENV "/home/amnesia/dev/mega.py/.tox/py37-normal" - -set _OLD_VIRTUAL_PATH="$PATH:q" -setenv PATH "$VIRTUAL_ENV:q/bin:$PATH:q" - - - -if ("" != "") then - set env_name = "" -else - set env_name = '('"$VIRTUAL_ENV:t:q"') ' -endif - -if ( $?VIRTUAL_ENV_DISABLE_PROMPT ) then - if ( $VIRTUAL_ENV_DISABLE_PROMPT == "" ) then - set do_prompt = "1" - else - set do_prompt = "0" - endif -else - set do_prompt = "1" -endif - -if ( $do_prompt == "1" ) then - # Could be in a non-interactive environment, - # in which case, $prompt is undefined and we wouldn't - # care about the prompt anyway. 
- if ( $?prompt ) then - set _OLD_VIRTUAL_PROMPT="$prompt:q" - if ( "$prompt:q" =~ *"$newline:q"* ) then - : - else - set prompt = "$env_name:q$prompt:q" - endif - endif -endif - -unset env_name -unset do_prompt - -alias pydoc python -m pydoc - -rehash diff --git a/.tox/py37-normal/bin/activate.fish b/.tox/py37-normal/bin/activate.fish deleted file mode 100644 index 23d75f1..0000000 --- a/.tox/py37-normal/bin/activate.fish +++ /dev/null @@ -1,102 +0,0 @@ -# This file must be used using `source bin/activate.fish` *within a running fish ( http://fishshell.com ) session*. -# Do not run it directly. - -function _bashify_path -d "Converts a fish path to something bash can recognize" - set fishy_path $argv - set bashy_path $fishy_path[1] - for path_part in $fishy_path[2..-1] - set bashy_path "$bashy_path:$path_part" - end - echo $bashy_path -end - -function _fishify_path -d "Converts a bash path to something fish can recognize" - echo $argv | tr ':' '\n' -end - -function deactivate -d 'Exit virtualenv mode and return to the normal environment.' - # reset old environment variables - if test -n "$_OLD_VIRTUAL_PATH" - # https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling - if test (echo $FISH_VERSION | tr "." "\n")[1] -lt 3 - set -gx PATH (_fishify_path $_OLD_VIRTUAL_PATH) - else - set -gx PATH $_OLD_VIRTUAL_PATH - end - set -e _OLD_VIRTUAL_PATH - end - - if test -n "$_OLD_VIRTUAL_PYTHONHOME" - set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME - set -e _OLD_VIRTUAL_PYTHONHOME - end - - if test -n "$_OLD_FISH_PROMPT_OVERRIDE" - and functions -q _old_fish_prompt - # Set an empty local `$fish_function_path` to allow the removal of `fish_prompt` using `functions -e`. - set -l fish_function_path - - # Erase virtualenv's `fish_prompt` and restore the original. - functions -e fish_prompt - functions -c _old_fish_prompt fish_prompt - functions -e _old_fish_prompt - set -e _OLD_FISH_PROMPT_OVERRIDE - end - - set -e VIRTUAL_ENV - - if test "$argv[1]" != 'nondestructive' - # Self-destruct! - functions -e pydoc - functions -e deactivate - functions -e _bashify_path - functions -e _fishify_path - end -end - -# Unset irrelevant variables. -deactivate nondestructive - -set -gx VIRTUAL_ENV "/home/amnesia/dev/mega.py/.tox/py37-normal" - -# https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling -if test (echo $FISH_VERSION | tr "." "\n")[1] -lt 3 - set -gx _OLD_VIRTUAL_PATH (_bashify_path $PATH) -else - set -gx _OLD_VIRTUAL_PATH $PATH -end -set -gx PATH "$VIRTUAL_ENV/bin" $PATH - -# Unset `$PYTHONHOME` if set. -if set -q PYTHONHOME - set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME - set -e PYTHONHOME -end - -function pydoc - python -m pydoc $argv -end - -if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" - # Copy the current `fish_prompt` function as `_old_fish_prompt`. - functions -c fish_prompt _old_fish_prompt - - function fish_prompt - # Save the current $status, for fish_prompts that display it. - set -l old_status $status - - # Prompt override provided? - # If not, just prepend the environment name. 
- if test -n "" - printf '%s%s' "" (set_color normal) - else - printf '%s(%s) ' (set_color normal) (basename "$VIRTUAL_ENV") - end - - # Restore the original $status - echo "exit $old_status" | source - _old_fish_prompt - end - - set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" -end diff --git a/.tox/py37-normal/bin/activate.ps1 b/.tox/py37-normal/bin/activate.ps1 deleted file mode 100644 index 95504d3..0000000 --- a/.tox/py37-normal/bin/activate.ps1 +++ /dev/null @@ -1,60 +0,0 @@ -$script:THIS_PATH = $myinvocation.mycommand.path -$script:BASE_DIR = Split-Path (Resolve-Path "$THIS_PATH/..") -Parent - -function global:deactivate([switch] $NonDestructive) { - if (Test-Path variable:_OLD_VIRTUAL_PATH) { - $env:PATH = $variable:_OLD_VIRTUAL_PATH - Remove-Variable "_OLD_VIRTUAL_PATH" -Scope global - } - - if (Test-Path function:_old_virtual_prompt) { - $function:prompt = $function:_old_virtual_prompt - Remove-Item function:\_old_virtual_prompt - } - - if ($env:VIRTUAL_ENV) { - Remove-Item env:VIRTUAL_ENV -ErrorAction SilentlyContinue - } - - if (!$NonDestructive) { - # Self destruct! - Remove-Item function:deactivate - Remove-Item function:pydoc - } -} - -function global:pydoc { - python -m pydoc $args -} - -# unset irrelevant variables -deactivate -nondestructive - -$VIRTUAL_ENV = $BASE_DIR -$env:VIRTUAL_ENV = $VIRTUAL_ENV - -New-Variable -Scope global -Name _OLD_VIRTUAL_PATH -Value $env:PATH - -$env:PATH = "$env:VIRTUAL_ENV/bin:" + $env:PATH -if (!$env:VIRTUAL_ENV_DISABLE_PROMPT) { - function global:_old_virtual_prompt { - "" - } - $function:_old_virtual_prompt = $function:prompt - - if ("" -ne "") { - function global:prompt { - # Add the custom prefix to the existing prompt - $previous_prompt_value = & $function:_old_virtual_prompt - ("" + $previous_prompt_value) - } - } - else { - function global:prompt { - # Add a prefix to the current prompt, but don't discard it. - $previous_prompt_value = & $function:_old_virtual_prompt - $new_prompt_value = "($( Split-Path $env:VIRTUAL_ENV -Leaf )) " - ($new_prompt_value + $previous_prompt_value) - } - } -} diff --git a/.tox/py37-normal/bin/activate.xsh b/.tox/py37-normal/bin/activate.xsh deleted file mode 100644 index 706a922..0000000 --- a/.tox/py37-normal/bin/activate.xsh +++ /dev/null @@ -1,46 +0,0 @@ -"""Xonsh activate script for virtualenv""" -from xonsh.tools import get_sep as _get_sep - -def _deactivate(args): - if "pydoc" in aliases: - del aliases["pydoc"] - - if ${...}.get("_OLD_VIRTUAL_PATH", ""): - $PATH = $_OLD_VIRTUAL_PATH - del $_OLD_VIRTUAL_PATH - - if ${...}.get("_OLD_VIRTUAL_PYTHONHOME", ""): - $PYTHONHOME = $_OLD_VIRTUAL_PYTHONHOME - del $_OLD_VIRTUAL_PYTHONHOME - - if "VIRTUAL_ENV" in ${...}: - del $VIRTUAL_ENV - - if "VIRTUAL_ENV_PROMPT" in ${...}: - del $VIRTUAL_ENV_PROMPT - - if "nondestructive" not in args: - # Self destruct! 
- del aliases["deactivate"] - - -# unset irrelevant variables -_deactivate(["nondestructive"]) -aliases["deactivate"] = _deactivate - -$VIRTUAL_ENV = r"/home/amnesia/dev/mega.py/.tox/py37-normal" - -$_OLD_VIRTUAL_PATH = $PATH -$PATH = $PATH[:] -$PATH.add($VIRTUAL_ENV + _get_sep() + "bin", front=True, replace=True) - -if ${...}.get("PYTHONHOME", ""): - # unset PYTHONHOME if set - $_OLD_VIRTUAL_PYTHONHOME = $PYTHONHOME - del $PYTHONHOME - -$VIRTUAL_ENV_PROMPT = "" -if not $VIRTUAL_ENV_PROMPT: - del $VIRTUAL_ENV_PROMPT - -aliases["pydoc"] = ["python", "-m", "pydoc"] diff --git a/.tox/py37-normal/bin/activate_this.py b/.tox/py37-normal/bin/activate_this.py deleted file mode 100644 index aa96457..0000000 --- a/.tox/py37-normal/bin/activate_this.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Activate virtualenv for current interpreter: - -Use exec(open(this_file).read(), {'__file__': this_file}). - -This can be used when you must use an existing Python interpreter, not the virtualenv bin/python. -""" -import os -import site -import sys - -try: - __file__ -except NameError: - raise AssertionError("You must use exec(open(this_file).read(), {'__file__': this_file}))") - -# prepend bin to PATH (this file is inside the bin directory) -bin_dir = os.path.dirname(os.path.abspath(__file__)) -os.environ["PATH"] = os.pathsep.join([bin_dir] + os.environ.get("PATH", "").split(os.pathsep)) - -base = os.path.dirname(bin_dir) - -# virtual env is right above bin directory -os.environ["VIRTUAL_ENV"] = base - -# add the virtual environments site-package to the host python import mechanism -IS_PYPY = hasattr(sys, "pypy_version_info") -IS_JYTHON = sys.platform.startswith("java") -if IS_JYTHON: - site_packages = os.path.join(base, "Lib", "site-packages") -elif IS_PYPY: - site_packages = os.path.join(base, "site-packages") -else: - IS_WIN = sys.platform == "win32" - if IS_WIN: - site_packages = os.path.join(base, "Lib", "site-packages") - else: - site_packages = os.path.join(base, "lib", "python{}.{}".format(*sys.version_info), "site-packages") - -prev = set(sys.path) -site.addsitedir(site_packages) -sys.real_prefix = sys.prefix -sys.prefix = base - -# Move the added items to the front of the path, in place -new = list(sys.path) -sys.path[:] = [i for i in new if i not in prev] + [i for i in new if i in prev] diff --git a/.tox/py37-normal/bin/chardetect b/.tox/py37-normal/bin/chardetect deleted file mode 100755 index 239e0cb..0000000 --- a/.tox/py37-normal/bin/chardetect +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/amnesia/dev/mega.py/.tox/py37-normal/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from chardet.cli.chardetect import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/.tox/py37-normal/bin/easy_install b/.tox/py37-normal/bin/easy_install deleted file mode 100755 index d1ac467..0000000 --- a/.tox/py37-normal/bin/easy_install +++ /dev/null @@ -1,10 +0,0 @@ -#!/home/amnesia/dev/mega.py/.tox/py37-normal/bin/python3.7 -# -*- coding: utf-8 -*- -import re -import sys - -from setuptools.command.easy_install import main - -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/.tox/py37-normal/bin/easy_install-3.7 b/.tox/py37-normal/bin/easy_install-3.7 deleted file mode 100755 index d1ac467..0000000 --- a/.tox/py37-normal/bin/easy_install-3.7 +++ /dev/null @@ -1,10 +0,0 @@ -#!/home/amnesia/dev/mega.py/.tox/py37-normal/bin/python3.7 -# -*- coding: utf-8 -*- -import 
re -import sys - -from setuptools.command.easy_install import main - -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/.tox/py37-normal/bin/pip b/.tox/py37-normal/bin/pip deleted file mode 100755 index b569b4a..0000000 --- a/.tox/py37-normal/bin/pip +++ /dev/null @@ -1,10 +0,0 @@ -#!/home/amnesia/dev/mega.py/.tox/py37-normal/bin/python3.7 -# -*- coding: utf-8 -*- -import re -import sys - -from pip._internal.main import main - -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/.tox/py37-normal/bin/pip3 b/.tox/py37-normal/bin/pip3 deleted file mode 100755 index b569b4a..0000000 --- a/.tox/py37-normal/bin/pip3 +++ /dev/null @@ -1,10 +0,0 @@ -#!/home/amnesia/dev/mega.py/.tox/py37-normal/bin/python3.7 -# -*- coding: utf-8 -*- -import re -import sys - -from pip._internal.main import main - -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/.tox/py37-normal/bin/pip3.7 b/.tox/py37-normal/bin/pip3.7 deleted file mode 100755 index b569b4a..0000000 --- a/.tox/py37-normal/bin/pip3.7 +++ /dev/null @@ -1,10 +0,0 @@ -#!/home/amnesia/dev/mega.py/.tox/py37-normal/bin/python3.7 -# -*- coding: utf-8 -*- -import re -import sys - -from pip._internal.main import main - -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/.tox/py37-normal/bin/python b/.tox/py37-normal/bin/python deleted file mode 120000 index 940bee3..0000000 --- a/.tox/py37-normal/bin/python +++ /dev/null @@ -1 +0,0 @@ -python3.7 \ No newline at end of file diff --git a/.tox/py37-normal/bin/python-config b/.tox/py37-normal/bin/python-config deleted file mode 100755 index fce1758..0000000 --- a/.tox/py37-normal/bin/python-config +++ /dev/null @@ -1,78 +0,0 @@ -#!/home/amnesia/dev/mega.py/.tox/py37-normal/bin/python - -import sys -import getopt -import sysconfig - -valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags', - 'ldflags', 'help'] - -if sys.version_info >= (3, 2): - valid_opts.insert(-1, 'extension-suffix') - valid_opts.append('abiflags') -if sys.version_info >= (3, 3): - valid_opts.append('configdir') - - -def exit_with_usage(code=1): - sys.stderr.write("Usage: {0} [{1}]\n".format( - sys.argv[0], '|'.join('--'+opt for opt in valid_opts))) - sys.exit(code) - -try: - opts, args = getopt.getopt(sys.argv[1:], '', valid_opts) -except getopt.error: - exit_with_usage() - -if not opts: - exit_with_usage() - -pyver = sysconfig.get_config_var('VERSION') -getvar = sysconfig.get_config_var - -opt_flags = [flag for (flag, val) in opts] - -if '--help' in opt_flags: - exit_with_usage(code=0) - -for opt in opt_flags: - if opt == '--prefix': - print(sysconfig.get_config_var('prefix')) - - elif opt == '--exec-prefix': - print(sysconfig.get_config_var('exec_prefix')) - - elif opt in ('--includes', '--cflags'): - flags = ['-I' + sysconfig.get_path('include'), - '-I' + sysconfig.get_path('platinclude')] - if opt == '--cflags': - flags.extend(getvar('CFLAGS').split()) - print(' '.join(flags)) - - elif opt in ('--libs', '--ldflags'): - abiflags = getattr(sys, 'abiflags', '') - libs = ['-lpython' + pyver + abiflags] - libs += getvar('LIBS').split() - libs += getvar('SYSLIBS').split() - # add the prefix/lib/pythonX.Y/config dir, but only if there is no - # shared library in prefix/lib/. 
- if opt == '--ldflags': - if not getvar('Py_ENABLE_SHARED'): - libs.insert(0, '-L' + getvar('LIBPL')) - if not getvar('PYTHONFRAMEWORK'): - libs.extend(getvar('LINKFORSHARED').split()) - print(' '.join(libs)) - - elif opt == '--extension-suffix': - ext_suffix = sysconfig.get_config_var('EXT_SUFFIX') - if ext_suffix is None: - ext_suffix = sysconfig.get_config_var('SO') - print(ext_suffix) - - elif opt == '--abiflags': - if not getattr(sys, 'abiflags', None): - exit_with_usage() - print(sys.abiflags) - - elif opt == '--configdir': - print(sysconfig.get_config_var('LIBPL')) diff --git a/.tox/py37-normal/bin/python3 b/.tox/py37-normal/bin/python3 deleted file mode 120000 index 940bee3..0000000 --- a/.tox/py37-normal/bin/python3 +++ /dev/null @@ -1 +0,0 @@ -python3.7 \ No newline at end of file diff --git a/.tox/py37-normal/bin/python3.7 b/.tox/py37-normal/bin/python3.7 deleted file mode 100755 index 084c57c..0000000 Binary files a/.tox/py37-normal/bin/python3.7 and /dev/null differ diff --git a/.tox/py37-normal/bin/wheel b/.tox/py37-normal/bin/wheel deleted file mode 100755 index 8fa4177..0000000 --- a/.tox/py37-normal/bin/wheel +++ /dev/null @@ -1,10 +0,0 @@ -#!/home/amnesia/dev/mega.py/.tox/py37-normal/bin/python3.7 -# -*- coding: utf-8 -*- -import re -import sys - -from wheel.cli import main - -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/.tox/py37-normal/include/python3.7m b/.tox/py37-normal/include/python3.7m deleted file mode 120000 index 0487f7b..0000000 --- a/.tox/py37-normal/include/python3.7m +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/include/python3.7m \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/LICENSE.txt b/.tox/py37-normal/lib/python3.7/LICENSE.txt deleted file mode 120000 index f87c689..0000000 --- a/.tox/py37-normal/lib/python3.7/LICENSE.txt +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/LICENSE.txt \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/__future__.py b/.tox/py37-normal/lib/python3.7/__future__.py deleted file mode 120000 index c6373af..0000000 --- a/.tox/py37-normal/lib/python3.7/__future__.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/__future__.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/_bootlocale.py b/.tox/py37-normal/lib/python3.7/_bootlocale.py deleted file mode 120000 index 4691028..0000000 --- a/.tox/py37-normal/lib/python3.7/_bootlocale.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/_bootlocale.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/_collections_abc.py b/.tox/py37-normal/lib/python3.7/_collections_abc.py deleted file mode 120000 index b586612..0000000 --- a/.tox/py37-normal/lib/python3.7/_collections_abc.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/_collections_abc.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/_dummy_thread.py b/.tox/py37-normal/lib/python3.7/_dummy_thread.py deleted file mode 120000 index 7e74a76..0000000 --- a/.tox/py37-normal/lib/python3.7/_dummy_thread.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/_dummy_thread.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/_weakrefset.py b/.tox/py37-normal/lib/python3.7/_weakrefset.py deleted file mode 120000 index 9d74648..0000000 --- 
a/.tox/py37-normal/lib/python3.7/_weakrefset.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/_weakrefset.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/abc.py b/.tox/py37-normal/lib/python3.7/abc.py deleted file mode 120000 index 39ad2e1..0000000 --- a/.tox/py37-normal/lib/python3.7/abc.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/abc.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/base64.py b/.tox/py37-normal/lib/python3.7/base64.py deleted file mode 120000 index fd33e2e..0000000 --- a/.tox/py37-normal/lib/python3.7/base64.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/base64.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/bisect.py b/.tox/py37-normal/lib/python3.7/bisect.py deleted file mode 120000 index 2a95b9b..0000000 --- a/.tox/py37-normal/lib/python3.7/bisect.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/bisect.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/codecs.py b/.tox/py37-normal/lib/python3.7/codecs.py deleted file mode 120000 index 883345b..0000000 --- a/.tox/py37-normal/lib/python3.7/codecs.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/codecs.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/collections b/.tox/py37-normal/lib/python3.7/collections deleted file mode 120000 index a284f60..0000000 --- a/.tox/py37-normal/lib/python3.7/collections +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/collections \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/config-3.7m-x86_64-linux-gnu b/.tox/py37-normal/lib/python3.7/config-3.7m-x86_64-linux-gnu deleted file mode 120000 index 029cfda..0000000 --- a/.tox/py37-normal/lib/python3.7/config-3.7m-x86_64-linux-gnu +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/config-3.7m-x86_64-linux-gnu \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/copy.py b/.tox/py37-normal/lib/python3.7/copy.py deleted file mode 120000 index b94f226..0000000 --- a/.tox/py37-normal/lib/python3.7/copy.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/copy.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/copyreg.py b/.tox/py37-normal/lib/python3.7/copyreg.py deleted file mode 120000 index 87c5514..0000000 --- a/.tox/py37-normal/lib/python3.7/copyreg.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/copyreg.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/distutils/__init__.py b/.tox/py37-normal/lib/python3.7/distutils/__init__.py deleted file mode 100644 index b9b0f24..0000000 --- a/.tox/py37-normal/lib/python3.7/distutils/__init__.py +++ /dev/null @@ -1,134 +0,0 @@ -import os -import sys -import warnings - -# opcode is not a virtualenv module, so we can use it to find the stdlib -# Important! 
To work on pypy, this must be a module that resides in the -# lib-python/modified-x.y.z directory -import opcode - -dirname = os.path.dirname - -distutils_path = os.path.join(os.path.dirname(opcode.__file__), "distutils") -if os.path.normpath(distutils_path) == os.path.dirname(os.path.normpath(__file__)): - warnings.warn("The virtualenv distutils package at %s appears to be in the same location as the system distutils?") -else: - __path__.insert(0, distutils_path) # noqa: F821 - if sys.version_info < (3, 4): - import imp - - real_distutils = imp.load_module("_virtualenv_distutils", None, distutils_path, ("", "", imp.PKG_DIRECTORY)) - else: - import importlib.machinery - - distutils_path = os.path.join(distutils_path, "__init__.py") - loader = importlib.machinery.SourceFileLoader("_virtualenv_distutils", distutils_path) - if sys.version_info < (3, 5): - import types - - real_distutils = types.ModuleType(loader.name) - else: - import importlib.util - - spec = importlib.util.spec_from_loader(loader.name, loader) - real_distutils = importlib.util.module_from_spec(spec) - loader.exec_module(real_distutils) - - # Copy the relevant attributes - try: - __revision__ = real_distutils.__revision__ - except AttributeError: - pass - __version__ = real_distutils.__version__ - -from distutils import dist, sysconfig # isort:skip - -try: - basestring -except NameError: - basestring = str - -# patch build_ext (distutils doesn't know how to get the libs directory -# path on windows - it hardcodes the paths around the patched sys.prefix) - -if sys.platform == "win32": - from distutils.command.build_ext import build_ext as old_build_ext - - class build_ext(old_build_ext): - def finalize_options(self): - if self.library_dirs is None: - self.library_dirs = [] - elif isinstance(self.library_dirs, basestring): - self.library_dirs = self.library_dirs.split(os.pathsep) - - self.library_dirs.insert(0, os.path.join(sys.real_prefix, "Libs")) - old_build_ext.finalize_options(self) - - from distutils.command import build_ext as build_ext_module - - build_ext_module.build_ext = build_ext - -# distutils.dist patches: - -old_find_config_files = dist.Distribution.find_config_files - - -def find_config_files(self): - found = old_find_config_files(self) - if os.name == "posix": - user_filename = ".pydistutils.cfg" - else: - user_filename = "pydistutils.cfg" - user_filename = os.path.join(sys.prefix, user_filename) - if os.path.isfile(user_filename): - for item in list(found): - if item.endswith("pydistutils.cfg"): - found.remove(item) - found.append(user_filename) - return found - - -dist.Distribution.find_config_files = find_config_files - -# distutils.sysconfig patches: - -old_get_python_inc = sysconfig.get_python_inc - - -def sysconfig_get_python_inc(plat_specific=0, prefix=None): - if prefix is None: - prefix = sys.real_prefix - return old_get_python_inc(plat_specific, prefix) - - -sysconfig_get_python_inc.__doc__ = old_get_python_inc.__doc__ -sysconfig.get_python_inc = sysconfig_get_python_inc - -old_get_python_lib = sysconfig.get_python_lib - - -def sysconfig_get_python_lib(plat_specific=0, standard_lib=0, prefix=None): - if standard_lib and prefix is None: - prefix = sys.real_prefix - return old_get_python_lib(plat_specific, standard_lib, prefix) - - -sysconfig_get_python_lib.__doc__ = old_get_python_lib.__doc__ -sysconfig.get_python_lib = sysconfig_get_python_lib - -old_get_config_vars = sysconfig.get_config_vars - - -def sysconfig_get_config_vars(*args): - real_vars = old_get_config_vars(*args) - if sys.platform == 
"win32": - lib_dir = os.path.join(sys.real_prefix, "libs") - if isinstance(real_vars, dict) and "LIBDIR" not in real_vars: - real_vars["LIBDIR"] = lib_dir # asked for all - elif isinstance(real_vars, list) and "LIBDIR" in args: - real_vars = real_vars + [lib_dir] # asked for list - return real_vars - - -sysconfig_get_config_vars.__doc__ = old_get_config_vars.__doc__ -sysconfig.get_config_vars = sysconfig_get_config_vars diff --git a/.tox/py37-normal/lib/python3.7/distutils/distutils.cfg b/.tox/py37-normal/lib/python3.7/distutils/distutils.cfg deleted file mode 100644 index 1af230e..0000000 --- a/.tox/py37-normal/lib/python3.7/distutils/distutils.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# This is a config file local to this virtualenv installation -# You may include options that will be used by all distutils commands, -# and by easy_install. For instance: -# -# [easy_install] -# find_links = http://mylocalsite diff --git a/.tox/py37-normal/lib/python3.7/encodings b/.tox/py37-normal/lib/python3.7/encodings deleted file mode 120000 index 091891c..0000000 --- a/.tox/py37-normal/lib/python3.7/encodings +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/encodings \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/enum.py b/.tox/py37-normal/lib/python3.7/enum.py deleted file mode 120000 index a4e19d6..0000000 --- a/.tox/py37-normal/lib/python3.7/enum.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/enum.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/fnmatch.py b/.tox/py37-normal/lib/python3.7/fnmatch.py deleted file mode 120000 index c7a1149..0000000 --- a/.tox/py37-normal/lib/python3.7/fnmatch.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/fnmatch.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/functools.py b/.tox/py37-normal/lib/python3.7/functools.py deleted file mode 120000 index 38d7a40..0000000 --- a/.tox/py37-normal/lib/python3.7/functools.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/functools.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/genericpath.py b/.tox/py37-normal/lib/python3.7/genericpath.py deleted file mode 120000 index 2f10ed9..0000000 --- a/.tox/py37-normal/lib/python3.7/genericpath.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/genericpath.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/hashlib.py b/.tox/py37-normal/lib/python3.7/hashlib.py deleted file mode 120000 index eb444d4..0000000 --- a/.tox/py37-normal/lib/python3.7/hashlib.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/hashlib.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/heapq.py b/.tox/py37-normal/lib/python3.7/heapq.py deleted file mode 120000 index a7c8e7d..0000000 --- a/.tox/py37-normal/lib/python3.7/heapq.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/heapq.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/hmac.py b/.tox/py37-normal/lib/python3.7/hmac.py deleted file mode 120000 index 99394d7..0000000 --- a/.tox/py37-normal/lib/python3.7/hmac.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/hmac.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/imp.py b/.tox/py37-normal/lib/python3.7/imp.py deleted file mode 120000 index 952b679..0000000 --- 
a/.tox/py37-normal/lib/python3.7/imp.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/imp.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/importlib b/.tox/py37-normal/lib/python3.7/importlib deleted file mode 120000 index f64b7cd..0000000 --- a/.tox/py37-normal/lib/python3.7/importlib +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/importlib \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/io.py b/.tox/py37-normal/lib/python3.7/io.py deleted file mode 120000 index 714d8ee..0000000 --- a/.tox/py37-normal/lib/python3.7/io.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/io.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/keyword.py b/.tox/py37-normal/lib/python3.7/keyword.py deleted file mode 120000 index ef79dd3..0000000 --- a/.tox/py37-normal/lib/python3.7/keyword.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/keyword.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/lib-dynload b/.tox/py37-normal/lib/python3.7/lib-dynload deleted file mode 120000 index 258423c..0000000 --- a/.tox/py37-normal/lib/python3.7/lib-dynload +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/lib-dynload \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/linecache.py b/.tox/py37-normal/lib/python3.7/linecache.py deleted file mode 120000 index 53cb751..0000000 --- a/.tox/py37-normal/lib/python3.7/linecache.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/linecache.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/locale.py b/.tox/py37-normal/lib/python3.7/locale.py deleted file mode 120000 index 61dff33..0000000 --- a/.tox/py37-normal/lib/python3.7/locale.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/locale.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/no-global-site-packages.txt b/.tox/py37-normal/lib/python3.7/no-global-site-packages.txt deleted file mode 100644 index e69de29..0000000 diff --git a/.tox/py37-normal/lib/python3.7/ntpath.py b/.tox/py37-normal/lib/python3.7/ntpath.py deleted file mode 120000 index a481e22..0000000 --- a/.tox/py37-normal/lib/python3.7/ntpath.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/ntpath.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/operator.py b/.tox/py37-normal/lib/python3.7/operator.py deleted file mode 120000 index 7b65a50..0000000 --- a/.tox/py37-normal/lib/python3.7/operator.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/operator.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/orig-prefix.txt b/.tox/py37-normal/lib/python3.7/orig-prefix.txt deleted file mode 100644 index 7baf5dd..0000000 --- a/.tox/py37-normal/lib/python3.7/orig-prefix.txt +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4 \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/os.py b/.tox/py37-normal/lib/python3.7/os.py deleted file mode 120000 index d075c7b..0000000 --- a/.tox/py37-normal/lib/python3.7/os.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/os.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/posixpath.py b/.tox/py37-normal/lib/python3.7/posixpath.py deleted file mode 120000 index 
62833e6..0000000 --- a/.tox/py37-normal/lib/python3.7/posixpath.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/posixpath.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/random.py b/.tox/py37-normal/lib/python3.7/random.py deleted file mode 120000 index 7db28da..0000000 --- a/.tox/py37-normal/lib/python3.7/random.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/random.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/re.py b/.tox/py37-normal/lib/python3.7/re.py deleted file mode 120000 index e4a7bc6..0000000 --- a/.tox/py37-normal/lib/python3.7/re.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/re.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/reprlib.py b/.tox/py37-normal/lib/python3.7/reprlib.py deleted file mode 120000 index 847f24f..0000000 --- a/.tox/py37-normal/lib/python3.7/reprlib.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/reprlib.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/rlcompleter.py b/.tox/py37-normal/lib/python3.7/rlcompleter.py deleted file mode 120000 index d9146ea..0000000 --- a/.tox/py37-normal/lib/python3.7/rlcompleter.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/rlcompleter.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/shutil.py b/.tox/py37-normal/lib/python3.7/shutil.py deleted file mode 120000 index 1785aca..0000000 --- a/.tox/py37-normal/lib/python3.7/shutil.py +++ /dev/null @@ -1 +0,0 @@ -/home/amnesia/.pyenv/versions/3.7.4/lib/python3.7/shutil.py \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/AES.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/AES.py deleted file mode 100644 index 14f68d8..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/AES.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/AES.py : AES -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== -"""AES symmetric cipher - -AES `(Advanced Encryption Standard)`__ is a symmetric block cipher standardized -by NIST_ . It has a fixed data block size of 16 bytes. -Its keys can be 128, 192, or 256 bits long. - -AES is very fast and secure, and it is the de facto standard for symmetric -encryption. 
- -As an example, encryption can be done as follows: - - >>> from Crypto.Cipher import AES - >>> from Crypto import Random - >>> - >>> key = b'Sixteen byte key' - >>> iv = Random.new().read(AES.block_size) - >>> cipher = AES.new(key, AES.MODE_CFB, iv) - >>> msg = iv + cipher.encrypt(b'Attack at dawn') - -.. __: http://en.wikipedia.org/wiki/Advanced_Encryption_Standard -.. _NIST: http://csrc.nist.gov/publications/fips/fips197/fips-197.pdf - -:undocumented: __revision__, __package__ -""" - -__revision__ = "$Id$" - -from Crypto.Cipher import blockalgo -from Crypto.Cipher import _AES - -class AESCipher (blockalgo.BlockAlgo): - """AES cipher object""" - - def __init__(self, key, *args, **kwargs): - """Initialize an AES cipher object - - See also `new()` at the module level.""" - blockalgo.BlockAlgo.__init__(self, _AES, key, *args, **kwargs) - -def new(key, *args, **kwargs): - """Create a new AES cipher - - :Parameters: - key : byte string - The secret key to use in the symmetric cipher. - It must be 16 (*AES-128*), 24 (*AES-192*), or 32 (*AES-256*) bytes long. - :Keywords: - mode : a *MODE_** constant - The chaining mode to use for encryption or decryption. - Default is `MODE_ECB`. - IV : byte string - The initialization vector to use for encryption or decryption. - - It is ignored for `MODE_ECB` and `MODE_CTR`. - - For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption - and `block_size` +2 bytes for decryption (in the latter case, it is - actually the *encrypted* IV which was prefixed to the ciphertext). - It is mandatory. - - For all other modes, it must be `block_size` bytes longs. It is optional and - when not present it will be given a default value of all zeroes. - counter : callable - (*Only* `MODE_CTR`). A stateful function that returns the next - *counter block*, which is a byte string of `block_size` bytes. - For better performance, use `Crypto.Util.Counter`. - segment_size : integer - (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext - are segmented in. - It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8. - - :Return: an `AESCipher` object - """ - return AESCipher(key, *args, **kwargs) - -#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`. -MODE_ECB = 1 -#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`. -MODE_CBC = 2 -#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`. -MODE_CFB = 3 -#: This mode should not be used. -MODE_PGP = 4 -#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`. -MODE_OFB = 5 -#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`. -MODE_CTR = 6 -#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`. -MODE_OPENPGP = 7 -#: Size of a data block (in bytes) -block_size = 16 -#: Size of a key (in bytes) -key_size = ( 16, 24, 32 ) - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/ARC2.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/ARC2.py deleted file mode 100644 index b5234e6..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/ARC2.py +++ /dev/null @@ -1,130 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/ARC2.py : ARC2.py -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== -"""RC2 symmetric cipher - -RC2_ (Rivest's Cipher version 2) is a symmetric block cipher designed -by Ron Rivest in 1987. The cipher started as a proprietary design, -that was reverse engineered and anonymously posted on Usenet in 1996. -For this reason, the algorithm was first called *Alleged* RC2 (ARC2), -since the company that owned RC2 (RSA Data Inc.) did not confirm whether -the details leaked into public domain were really correct. - -The company eventually published its full specification in RFC2268_. - -RC2 has a fixed data block size of 8 bytes. Length of its keys can vary from -8 to 128 bits. One particular property of RC2 is that the actual -cryptographic strength of the key (*effective key length*) can be reduced -via a parameter. - -Even though RC2 is not cryptographically broken, it has not been analyzed as -thoroughly as AES, which is also faster than RC2. - -New designs should not use RC2. - -As an example, encryption can be done as follows: - - >>> from Crypto.Cipher import ARC2 - >>> from Crypto import Random - >>> - >>> key = b'Sixteen byte key' - >>> iv = Random.new().read(ARC2.block_size) - >>> cipher = ARC2.new(key, ARC2.MODE_CFB, iv) - >>> msg = iv + cipher.encrypt(b'Attack at dawn') - -.. _RC2: http://en.wikipedia.org/wiki/RC2 -.. _RFC2268: http://tools.ietf.org/html/rfc2268 - -:undocumented: __revision__, __package__ -""" - -__revision__ = "$Id$" - -from Crypto.Cipher import blockalgo -from Crypto.Cipher import _ARC2 - -class RC2Cipher (blockalgo.BlockAlgo): - """RC2 cipher object""" - - def __init__(self, key, *args, **kwargs): - """Initialize an ARC2 cipher object - - See also `new()` at the module level.""" - blockalgo.BlockAlgo.__init__(self, _ARC2, key, *args, **kwargs) - -def new(key, *args, **kwargs): - """Create a new RC2 cipher - - :Parameters: - key : byte string - The secret key to use in the symmetric cipher. - Its length can vary from 1 to 128 bytes. - :Keywords: - mode : a *MODE_** constant - The chaining mode to use for encryption or decryption. - Default is `MODE_ECB`. - IV : byte string - The initialization vector to use for encryption or decryption. - - It is ignored for `MODE_ECB` and `MODE_CTR`. - - For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption - and `block_size` +2 bytes for decryption (in the latter case, it is - actually the *encrypted* IV which was prefixed to the ciphertext). - It is mandatory. - - For all other modes, it must be `block_size` bytes longs. It is optional and - when not present it will be given a default value of all zeroes. - counter : callable - (*Only* `MODE_CTR`). A stateful function that returns the next - *counter block*, which is a byte string of `block_size` bytes. - For better performance, use `Crypto.Util.Counter`. - segment_size : integer - (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext - are segmented in. - It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8. 
- effective_keylen : integer - Maximum cryptographic strength of the key, in bits. - It can vary from 0 to 1024. The default value is 1024. - - :Return: an `RC2Cipher` object - """ - return RC2Cipher(key, *args, **kwargs) - -#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`. -MODE_ECB = 1 -#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`. -MODE_CBC = 2 -#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`. -MODE_CFB = 3 -#: This mode should not be used. -MODE_PGP = 4 -#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`. -MODE_OFB = 5 -#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`. -MODE_CTR = 6 -#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`. -MODE_OPENPGP = 7 -#: Size of a data block (in bytes) -block_size = 8 -#: Size of a key (in bytes) -key_size = range(1,16+1) - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/ARC4.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/ARC4.py deleted file mode 100644 index d83f75b..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/ARC4.py +++ /dev/null @@ -1,120 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/ARC4.py : ARC4 -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== -"""ARC4 symmetric cipher - -ARC4_ (Alleged RC4) is an implementation of RC4 (Rivest's Cipher version 4), -a symmetric stream cipher designed by Ron Rivest in 1987. - -The cipher started as a proprietary design, that was reverse engineered and -anonymously posted on Usenet in 1994. The company that owns RC4 (RSA Data -Inc.) never confirmed the correctness of the leaked algorithm. - -Unlike RC2, the company has never published the full specification of RC4, -of whom it still holds the trademark. - -ARC4 keys can vary in length from 40 to 2048 bits. - -One problem of ARC4 is that it does not take a nonce or an IV. If it is required -to encrypt multiple messages with the same long-term key, a distinct -independent nonce must be created for each message, and a short-term key must -be derived from the combination of the long-term key and the nonce. -Due to the weak key scheduling algorithm of RC2, the combination must be carried -out with a complex function (e.g. a cryptographic hash) and not by simply -concatenating key and nonce. - -New designs should not use ARC4. A good alternative is AES -(`Crypto.Cipher.AES`) in any of the modes that turn it into a stream cipher (OFB, CFB, or CTR). 
- -As an example, encryption can be done as follows: - - >>> from Crypto.Cipher import ARC4 - >>> from Crypto.Hash import SHA - >>> from Crypto import Random - >>> - >>> key = b'Very long and confidential key' - >>> nonce = Random.new().read(16) - >>> tempkey = SHA.new(key+nonce).digest() - >>> cipher = ARC4.new(tempkey) - >>> msg = nonce + cipher.encrypt(b'Open the pod bay doors, HAL') - -.. _ARC4: http://en.wikipedia.org/wiki/RC4 - -:undocumented: __revision__, __package__ -""" - -__revision__ = "$Id$" - -from Crypto.Cipher import _ARC4 - -class ARC4Cipher: - """ARC4 cipher object""" - - - def __init__(self, key, *args, **kwargs): - """Initialize an ARC4 cipher object - - See also `new()` at the module level.""" - - self._cipher = _ARC4.new(key, *args, **kwargs) - self.block_size = self._cipher.block_size - self.key_size = self._cipher.key_size - - def encrypt(self, plaintext): - """Encrypt a piece of data. - - :Parameters: - plaintext : byte string - The piece of data to encrypt. It can be of any size. - :Return: the encrypted data (byte string, as long as the - plaintext). - """ - return self._cipher.encrypt(plaintext) - - def decrypt(self, ciphertext): - """Decrypt a piece of data. - - :Parameters: - ciphertext : byte string - The piece of data to decrypt. It can be of any size. - :Return: the decrypted data (byte string, as long as the - ciphertext). - """ - return self._cipher.decrypt(ciphertext) - -def new(key, *args, **kwargs): - """Create a new ARC4 cipher - - :Parameters: - key : byte string - The secret key to use in the symmetric cipher. - It can have any length, with a minimum of 40 bytes. - Its cryptograpic strength is always capped to 2048 bits (256 bytes). - - :Return: an `ARC4Cipher` object - """ - return ARC4Cipher(key, *args, **kwargs) - -#: Size of a data block (in bytes) -block_size = 1 -#: Size of a key (in bytes) -key_size = range(1,256+1) - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/Blowfish.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/Blowfish.py deleted file mode 100644 index 8c81d96..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/Blowfish.py +++ /dev/null @@ -1,121 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/Blowfish.py : Blowfish -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== -"""Blowfish symmetric cipher - -Blowfish_ is a symmetric block cipher designed by Bruce Schneier. - -It has a fixed data block size of 8 bytes and its keys can vary in length -from 32 to 448 bits (4 to 56 bytes). - -Blowfish is deemed secure and it is fast. 
However, its keys should be chosen -to be big enough to withstand a brute force attack (e.g. at least 16 bytes). - -As an example, encryption can be done as follows: - - >>> from Crypto.Cipher import Blowfish - >>> from Crypto import Random - >>> from struct import pack - >>> - >>> bs = Blowfish.block_size - >>> key = b'An arbitrarily long key' - >>> iv = Random.new().read(bs) - >>> cipher = Blowfish.new(key, Blowfish.MODE_CBC, iv) - >>> plaintext = b'docendo discimus ' - >>> plen = bs - divmod(len(plaintext),bs)[1] - >>> padding = [plen]*plen - >>> padding = pack('b'*plen, *padding) - >>> msg = iv + cipher.encrypt(plaintext + padding) - -.. _Blowfish: http://www.schneier.com/blowfish.html - -:undocumented: __revision__, __package__ -""" - -__revision__ = "$Id$" - -from Crypto.Cipher import blockalgo -from Crypto.Cipher import _Blowfish - -class BlowfishCipher (blockalgo.BlockAlgo): - """Blowfish cipher object""" - - def __init__(self, key, *args, **kwargs): - """Initialize a Blowfish cipher object - - See also `new()` at the module level.""" - blockalgo.BlockAlgo.__init__(self, _Blowfish, key, *args, **kwargs) - -def new(key, *args, **kwargs): - """Create a new Blowfish cipher - - :Parameters: - key : byte string - The secret key to use in the symmetric cipher. - Its length can vary from 4 to 56 bytes. - :Keywords: - mode : a *MODE_** constant - The chaining mode to use for encryption or decryption. - Default is `MODE_ECB`. - IV : byte string - The initialization vector to use for encryption or decryption. - - It is ignored for `MODE_ECB` and `MODE_CTR`. - - For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption - and `block_size` +2 bytes for decryption (in the latter case, it is - actually the *encrypted* IV which was prefixed to the ciphertext). - It is mandatory. - - For all other modes, it must be `block_size` bytes longs. It is optional and - when not present it will be given a default value of all zeroes. - counter : callable - (*Only* `MODE_CTR`). A stateful function that returns the next - *counter block*, which is a byte string of `block_size` bytes. - For better performance, use `Crypto.Util.Counter`. - segment_size : integer - (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext - are segmented in. - It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8. - - :Return: a `BlowfishCipher` object - """ - return BlowfishCipher(key, *args, **kwargs) - -#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`. -MODE_ECB = 1 -#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`. -MODE_CBC = 2 -#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`. -MODE_CFB = 3 -#: This mode should not be used. -MODE_PGP = 4 -#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`. -MODE_OFB = 5 -#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`. -MODE_CTR = 6 -#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`. -MODE_OPENPGP = 7 -#: Size of a data block (in bytes) -block_size = 8 -#: Size of a key (in bytes) -key_size = range(4,56+1) - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/CAST.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/CAST.py deleted file mode 100644 index 89543b2..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/CAST.py +++ /dev/null @@ -1,123 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/CAST.py : CAST -# -# =================================================================== -# The contents of this file are dedicated to the public domain. 
To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== -"""CAST-128 symmetric cipher - -CAST-128_ (or CAST5) is a symmetric block cipher specified in RFC2144_. - -It has a fixed data block size of 8 bytes. Its key can vary in length -from 40 to 128 bits. - -CAST is deemed to be cryptographically secure, but its usage is not widespread. -Keys of sufficient length should be used to prevent brute force attacks -(128 bits are recommended). - -As an example, encryption can be done as follows: - - >>> from Crypto.Cipher import CAST - >>> from Crypto import Random - >>> - >>> key = b'Sixteen byte key' - >>> iv = Random.new().read(CAST.block_size) - >>> cipher = CAST.new(key, CAST.MODE_OPENPGP, iv) - >>> plaintext = b'sona si latine loqueris ' - >>> msg = cipher.encrypt(plaintext) - >>> - ... - >>> eiv = msg[:CAST.block_size+2] - >>> ciphertext = msg[CAST.block_size+2:] - >>> cipher = CAST.new(key, CAST.MODE_OPENPGP, eiv) - >>> print cipher.decrypt(ciphertext) - -.. _CAST-128: http://en.wikipedia.org/wiki/CAST-128 -.. _RFC2144: http://tools.ietf.org/html/rfc2144 - -:undocumented: __revision__, __package__ -""" - -__revision__ = "$Id$" - -from Crypto.Cipher import blockalgo -from Crypto.Cipher import _CAST - -class CAST128Cipher(blockalgo.BlockAlgo): - """CAST-128 cipher object""" - - def __init__(self, key, *args, **kwargs): - """Initialize a CAST-128 cipher object - - See also `new()` at the module level.""" - blockalgo.BlockAlgo.__init__(self, _CAST, key, *args, **kwargs) - -def new(key, *args, **kwargs): - """Create a new CAST-128 cipher - - :Parameters: - key : byte string - The secret key to use in the symmetric cipher. - Its length may vary from 5 to 16 bytes. - :Keywords: - mode : a *MODE_** constant - The chaining mode to use for encryption or decryption. - Default is `MODE_ECB`. - IV : byte string - The initialization vector to use for encryption or decryption. - - It is ignored for `MODE_ECB` and `MODE_CTR`. - - For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption - and `block_size` +2 bytes for decryption (in the latter case, it is - actually the *encrypted* IV which was prefixed to the ciphertext). - It is mandatory. - - For all other modes, it must be `block_size` bytes longs. It is optional and - when not present it will be given a default value of all zeroes. - counter : callable - (*Only* `MODE_CTR`). A stateful function that returns the next - *counter block*, which is a byte string of `block_size` bytes. - For better performance, use `Crypto.Util.Counter`. - segment_size : integer - (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext - are segmented in. - It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8. 
- - :Return: an `CAST128Cipher` object - """ - return CAST128Cipher(key, *args, **kwargs) - -#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`. -MODE_ECB = 1 -#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`. -MODE_CBC = 2 -#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`. -MODE_CFB = 3 -#: This mode should not be used. -MODE_PGP = 4 -#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`. -MODE_OFB = 5 -#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`. -MODE_CTR = 6 -#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`. -MODE_OPENPGP = 7 -#: Size of a data block (in bytes) -block_size = 8 -#: Size of a key (in bytes) -key_size = range(5,16+1) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/DES.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/DES.py deleted file mode 100644 index 2fae42f..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/DES.py +++ /dev/null @@ -1,118 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/DES.py : DES -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== -"""DES symmetric cipher - -DES `(Data Encryption Standard)`__ is a symmetric block cipher standardized -by NIST_ . It has a fixed data block size of 8 bytes. -Its keys are 64 bits long, even though 8 bits were used for integrity (now they -are ignored) and do not contribute to securty. - -DES is cryptographically secure, but its key length is too short by nowadays -standards and it could be brute forced with some effort. - -DES should not be used for new designs. Use `AES`. - -As an example, encryption can be done as follows: - - >>> from Crypto.Cipher import DES3 - >>> from Crypto import Random - >>> - >>> key = b'Sixteen byte key' - >>> iv = Random.new().read(DES3.block_size) - >>> cipher = DES3.new(key, DES3.MODE_OFB, iv) - >>> plaintext = b'sona si latine loqueris ' - >>> msg = iv + cipher.encrypt(plaintext) - -.. __: http://en.wikipedia.org/wiki/Data_Encryption_Standard -.. _NIST: http://csrc.nist.gov/publications/fips/fips46-3/fips46-3.pdf - -:undocumented: __revision__, __package__ -""" - -__revision__ = "$Id$" - -from Crypto.Cipher import blockalgo -from Crypto.Cipher import _DES - -class DESCipher(blockalgo.BlockAlgo): - """DES cipher object""" - - def __init__(self, key, *args, **kwargs): - """Initialize a DES cipher object - - See also `new()` at the module level.""" - blockalgo.BlockAlgo.__init__(self, _DES, key, *args, **kwargs) - -def new(key, *args, **kwargs): - """Create a new DES cipher - - :Parameters: - key : byte string - The secret key to use in the symmetric cipher. - It must be 8 byte long. 
The parity bits will be ignored. - :Keywords: - mode : a *MODE_** constant - The chaining mode to use for encryption or decryption. - Default is `MODE_ECB`. - IV : byte string - The initialization vector to use for encryption or decryption. - - It is ignored for `MODE_ECB` and `MODE_CTR`. - - For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption - and `block_size` +2 bytes for decryption (in the latter case, it is - actually the *encrypted* IV which was prefixed to the ciphertext). - It is mandatory. - - For all other modes, it must be `block_size` bytes longs. It is optional and - when not present it will be given a default value of all zeroes. - counter : callable - (*Only* `MODE_CTR`). A stateful function that returns the next - *counter block*, which is a byte string of `block_size` bytes. - For better performance, use `Crypto.Util.Counter`. - segment_size : integer - (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext - are segmented in. - It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8. - - :Return: an `DESCipher` object - """ - return DESCipher(key, *args, **kwargs) - -#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`. -MODE_ECB = 1 -#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`. -MODE_CBC = 2 -#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`. -MODE_CFB = 3 -#: This mode should not be used. -MODE_PGP = 4 -#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`. -MODE_OFB = 5 -#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`. -MODE_CTR = 6 -#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`. -MODE_OPENPGP = 7 -#: Size of a data block (in bytes) -block_size = 8 -#: Size of a key (in bytes) -key_size = 8 diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/DES3.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/DES3.py deleted file mode 100644 index 7fedac8..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/DES3.py +++ /dev/null @@ -1,133 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/DES3.py : DES3 -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== -"""Triple DES symmetric cipher - -`Triple DES`__ (or TDES or TDEA or 3DES) is a symmetric block cipher standardized by NIST_. -It has a fixed data block size of 8 bytes. Its keys are 128 (*Option 1*) or 192 -bits (*Option 2*) long. -However, 1 out of 8 bits is used for redundancy and do not contribute to -security. The effective key length is respectively 112 or 168 bits. - -TDES consists of the concatenation of 3 simple `DES` ciphers. 
- -The plaintext is first DES encrypted with *K1*, then decrypted with *K2*, -and finally encrypted again with *K3*. The ciphertext is decrypted in the reverse manner. - -The 192 bit key is a bundle of three 64 bit independent subkeys: *K1*, *K2*, and *K3*. - -The 128 bit key is split into *K1* and *K2*, whereas *K1=K3*. - -It is important that all subkeys are different, otherwise TDES would degrade to -single `DES`. - -TDES is cryptographically secure, even though it is neither as secure nor as fast -as `AES`. - -As an example, encryption can be done as follows: - - >>> from Crypto.Cipher import DES - >>> from Crypto import Random - >>> from Crypto.Util import Counter - >>> - >>> key = b'-8B key-' - >>> nonce = Random.new().read(DES.block_size/2) - >>> ctr = Counter.new(DES.block_size*8/2, prefix=nonce) - >>> cipher = DES.new(key, DES.MODE_CTR, counter=ctr) - >>> plaintext = b'We are no longer the knights who say ni!' - >>> msg = nonce + cipher.encrypt(plaintext) - -.. __: http://en.wikipedia.org/wiki/Triple_DES -.. _NIST: http://csrc.nist.gov/publications/nistpubs/800-67/SP800-67.pdf - -:undocumented: __revision__, __package__ -""" - -__revision__ = "$Id$" - -from Crypto.Cipher import blockalgo -from Crypto.Cipher import _DES3 - -class DES3Cipher(blockalgo.BlockAlgo): - """TDES cipher object""" - - def __init__(self, key, *args, **kwargs): - """Initialize a TDES cipher object - - See also `new()` at the module level.""" - blockalgo.BlockAlgo.__init__(self, _DES3, key, *args, **kwargs) - -def new(key, *args, **kwargs): - """Create a new TDES cipher - - :Parameters: - key : byte string - The secret key to use in the symmetric cipher. - It must be 16 or 24 bytes long. The parity bits will be ignored. - :Keywords: - mode : a *MODE_** constant - The chaining mode to use for encryption or decryption. - Default is `MODE_ECB`. - IV : byte string - The initialization vector to use for encryption or decryption. - - It is ignored for `MODE_ECB` and `MODE_CTR`. - - For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption - and `block_size` +2 bytes for decryption (in the latter case, it is - actually the *encrypted* IV which was prefixed to the ciphertext). - It is mandatory. - - For all other modes, it must be `block_size` bytes longs. It is optional and - when not present it will be given a default value of all zeroes. - counter : callable - (*Only* `MODE_CTR`). A stateful function that returns the next - *counter block*, which is a byte string of `block_size` bytes. - For better performance, use `Crypto.Util.Counter`. - segment_size : integer - (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext - are segmented in. - It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8. - - :Attention: it is important that all 8 byte subkeys are different, - otherwise TDES would degrade to single `DES`. - :Return: an `DES3Cipher` object - """ - return DES3Cipher(key, *args, **kwargs) - -#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`. -MODE_ECB = 1 -#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`. -MODE_CBC = 2 -#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`. -MODE_CFB = 3 -#: This mode should not be used. -MODE_PGP = 4 -#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`. -MODE_OFB = 5 -#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`. -MODE_CTR = 6 -#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`. 
-MODE_OPENPGP = 7 -#: Size of a data block (in bytes) -block_size = 8 -#: Size of a key (in bytes) -key_size = ( 16, 24 ) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/PKCS1_OAEP.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/PKCS1_OAEP.py deleted file mode 100644 index 2738ce3..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/PKCS1_OAEP.py +++ /dev/null @@ -1,255 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/PKCS1_OAEP.py : PKCS#1 OAEP -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""RSA encryption protocol according to PKCS#1 OAEP - -See RFC3447__ or the `original RSA Labs specification`__ . - -This scheme is more properly called ``RSAES-OAEP``. - -As an example, a sender may encrypt a message in this way: - - >>> from Crypto.Cipher import PKCS1_OAEP - >>> from Crypto.PublicKey import RSA - >>> - >>> message = 'To be encrypted' - >>> key = RSA.importKey(open('pubkey.der').read()) - >>> cipher = PKCS1_OAEP.new(key) - >>> ciphertext = cipher.encrypt(message) - -At the receiver side, decryption can be done using the private part of -the RSA key: - - >>> key = RSA.importKey(open('privkey.der').read()) - >>> cipher = PKCS1_OAP.new(key) - >>> message = cipher.decrypt(ciphertext) - -:undocumented: __revision__, __package__ - -.. __: http://www.ietf.org/rfc/rfc3447.txt -.. __: http://www.rsa.com/rsalabs/node.asp?id=2125. -""" - - - -__revision__ = "$Id$" -__all__ = [ 'new', 'PKCS1OAEP_Cipher' ] - -import Crypto.Signature.PKCS1_PSS -import Crypto.Hash.SHA - -from Crypto.Util.py3compat import * -import Crypto.Util.number -from Crypto.Util.number import ceil_div -from Crypto.Util.strxor import strxor - -class PKCS1OAEP_Cipher: - """This cipher can perform PKCS#1 v1.5 OAEP encryption or decryption.""" - - def __init__(self, key, hashAlgo, mgfunc, label): - """Initialize this PKCS#1 OAEP cipher object. - - :Parameters: - key : an RSA key object - If a private half is given, both encryption and decryption are possible. - If a public half is given, only encryption is possible. - hashAlgo : hash object - The hash function to use. This can be a module under `Crypto.Hash` - or an existing hash object created from any of such modules. If not specified, - `Crypto.Hash.SHA` (that is, SHA-1) is used. - mgfunc : callable - A mask generation function that accepts two parameters: a string to - use as seed, and the lenth of the mask to generate, in bytes. - If not specified, the standard MGF1 is used (a safe choice). - label : string - A label to apply to this particular encryption. If not specified, - an empty string is used. 
Specifying a label does not improve - security. - - :attention: Modify the mask generation function only if you know what you are doing. - Sender and receiver must use the same one. - """ - self._key = key - - if hashAlgo: - self._hashObj = hashAlgo - else: - self._hashObj = Crypto.Hash.SHA - - if mgfunc: - self._mgf = mgfunc - else: - self._mgf = lambda x,y: Crypto.Signature.PKCS1_PSS.MGF1(x,y,self._hashObj) - - self._label = label - - def can_encrypt(self): - """Return True/1 if this cipher object can be used for encryption.""" - return self._key.can_encrypt() - - def can_decrypt(self): - """Return True/1 if this cipher object can be used for decryption.""" - return self._key.can_decrypt() - - def encrypt(self, message): - """Produce the PKCS#1 OAEP encryption of a message. - - This function is named ``RSAES-OAEP-ENCRYPT``, and is specified in - section 7.1.1 of RFC3447. - - :Parameters: - message : string - The message to encrypt, also known as plaintext. It can be of - variable length, but not longer than the RSA modulus (in bytes) - minus 2, minus twice the hash output size. - - :Return: A string, the ciphertext in which the message is encrypted. - It is as long as the RSA modulus (in bytes). - :Raise ValueError: - If the RSA key length is not sufficiently long to deal with the given - message. - """ - # TODO: Verify the key is RSA - - randFunc = self._key._randfunc - - # See 7.1.1 in RFC3447 - modBits = Crypto.Util.number.size(self._key.n) - k = ceil_div(modBits,8) # Convert from bits to bytes - hLen = self._hashObj.digest_size - mLen = len(message) - - # Step 1b - ps_len = k-mLen-2*hLen-2 - if ps_len<0: - raise ValueError("Plaintext is too long.") - # Step 2a - lHash = self._hashObj.new(self._label).digest() - # Step 2b - ps = bchr(0x00)*ps_len - # Step 2c - db = lHash + ps + bchr(0x01) + message - # Step 2d - ros = randFunc(hLen) - # Step 2e - dbMask = self._mgf(ros, k-hLen-1) - # Step 2f - maskedDB = strxor(db, dbMask) - # Step 2g - seedMask = self._mgf(maskedDB, hLen) - # Step 2h - maskedSeed = strxor(ros, seedMask) - # Step 2i - em = bchr(0x00) + maskedSeed + maskedDB - # Step 3a (OS2IP), step 3b (RSAEP), part of step 3c (I2OSP) - m = self._key.encrypt(em, 0)[0] - # Complete step 3c (I2OSP) - c = bchr(0x00)*(k-len(m)) + m - return c - - def decrypt(self, ct): - """Decrypt a PKCS#1 OAEP ciphertext. - - This function is named ``RSAES-OAEP-DECRYPT``, and is specified in - section 7.1.2 of RFC3447. - - :Parameters: - ct : string - The ciphertext that contains the message to recover. - - :Return: A string, the original message. - :Raise ValueError: - If the ciphertext length is incorrect, or if the decryption does not - succeed. - :Raise TypeError: - If the RSA key has no private half. 
- """ - # TODO: Verify the key is RSA - - # See 7.1.2 in RFC3447 - modBits = Crypto.Util.number.size(self._key.n) - k = ceil_div(modBits,8) # Convert from bits to bytes - hLen = self._hashObj.digest_size - - # Step 1b and 1c - if len(ct) != k or k>> from Crypto.Cipher import PKCS1_v1_5 - >>> from Crypto.PublicKey import RSA - >>> from Crypto.Hash import SHA - >>> - >>> message = 'To be encrypted' - >>> h = SHA.new(message) - >>> - >>> key = RSA.importKey(open('pubkey.der').read()) - >>> cipher = PKCS1_v1_5.new(key) - >>> ciphertext = cipher.encrypt(message+h.digest()) - -At the receiver side, decryption can be done using the private part of -the RSA key: - - >>> From Crypto.Hash import SHA - >>> from Crypto import Random - >>> - >>> key = RSA.importKey(open('privkey.der').read()) - >>> - >>> dsize = SHA.digest_size - >>> sentinel = Random.new().read(15+dsize) # Let's assume that average data length is 15 - >>> - >>> cipher = PKCS1_v1_5.new(key) - >>> message = cipher.decrypt(ciphertext, sentinel) - >>> - >>> digest = SHA.new(message[:-dsize]).digest() - >>> if digest==message[-dsize:]: # Note how we DO NOT look for the sentinel - >>> print "Encryption was correct." - >>> else: - >>> print "Encryption was not correct." - -:undocumented: __revision__, __package__ - -.. __: http://www.ietf.org/rfc/rfc3447.txt -.. __: http://www.rsa.com/rsalabs/node.asp?id=2125. -""" - -__revision__ = "$Id$" -__all__ = [ 'new', 'PKCS115_Cipher' ] - -from Crypto.Util.number import ceil_div -from Crypto.Util.py3compat import * -import Crypto.Util.number - -class PKCS115_Cipher: - """This cipher can perform PKCS#1 v1.5 RSA encryption or decryption.""" - - def __init__(self, key): - """Initialize this PKCS#1 v1.5 cipher object. - - :Parameters: - key : an RSA key object - If a private half is given, both encryption and decryption are possible. - If a public half is given, only encryption is possible. - """ - self._key = key - - def can_encrypt(self): - """Return True if this cipher object can be used for encryption.""" - return self._key.can_encrypt() - - def can_decrypt(self): - """Return True if this cipher object can be used for decryption.""" - return self._key.can_decrypt() - - def encrypt(self, message): - """Produce the PKCS#1 v1.5 encryption of a message. - - This function is named ``RSAES-PKCS1-V1_5-ENCRYPT``, and is specified in - section 7.2.1 of RFC3447. - For a complete example see `Crypto.Cipher.PKCS1_v1_5`. - - :Parameters: - message : byte string - The message to encrypt, also known as plaintext. It can be of - variable length, but not longer than the RSA modulus (in bytes) minus 11. - - :Return: A byte string, the ciphertext in which the message is encrypted. - It is as long as the RSA modulus (in bytes). - :Raise ValueError: - If the RSA key length is not sufficiently long to deal with the given - message. 
- - """ - # TODO: Verify the key is RSA - - randFunc = self._key._randfunc - - # See 7.2.1 in RFC3447 - modBits = Crypto.Util.number.size(self._key.n) - k = ceil_div(modBits,8) # Convert from bits to bytes - mLen = len(message) - - # Step 1 - if mLen > k-11: - raise ValueError("Plaintext is too long.") - # Step 2a - class nonZeroRandByte: - def __init__(self, rf): self.rf=rf - def __call__(self, c): - while bord(c)==0x00: c=self.rf(1)[0] - return c - ps = tobytes(list(map(nonZeroRandByte(randFunc), randFunc(k-mLen-3)))) - # Step 2b - em = b('\x00\x02') + ps + bchr(0x00) + message - # Step 3a (OS2IP), step 3b (RSAEP), part of step 3c (I2OSP) - m = self._key.encrypt(em, 0)[0] - # Complete step 3c (I2OSP) - c = bchr(0x00)*(k-len(m)) + m - return c - - def decrypt(self, ct, sentinel): - """Decrypt a PKCS#1 v1.5 ciphertext. - - This function is named ``RSAES-PKCS1-V1_5-DECRYPT``, and is specified in - section 7.2.2 of RFC3447. - For a complete example see `Crypto.Cipher.PKCS1_v1_5`. - - :Parameters: - ct : byte string - The ciphertext that contains the message to recover. - sentinel : any type - The object to return to indicate that an error was detected during decryption. - - :Return: A byte string. It is either the original message or the ``sentinel`` (in case of an error). - :Raise ValueError: - If the ciphertext length is incorrect - :Raise TypeError: - If the RSA key has no private half. - - :attention: - You should **never** let the party who submitted the ciphertext know that - this function returned the ``sentinel`` value. - Armed with such knowledge (for a fair amount of carefully crafted but invalid ciphertexts), - an attacker is able to recontruct the plaintext of any other encryption that were carried out - with the same RSA public key (see `Bleichenbacher's`__ attack). - - In general, it should not be possible for the other party to distinguish - whether processing at the server side failed because the value returned - was a ``sentinel`` as opposed to a random, invalid message. - - In fact, the second option is not that unlikely: encryption done according to PKCS#1 v1.5 - embeds no good integrity check. There is roughly one chance - in 2^16 for a random ciphertext to be returned as a valid message - (although random looking). - - It is therefore advisabled to: - - 1. Select as ``sentinel`` a value that resembles a plausable random, invalid message. - 2. Not report back an error as soon as you detect a ``sentinel`` value. - Put differently, you should not explicitly check if the returned value is the ``sentinel`` or not. - 3. Cover all possible errors with a single, generic error indicator. - 4. Embed into the definition of ``message`` (at the protocol level) a digest (e.g. ``SHA-1``). - It is recommended for it to be the rightmost part ``message``. - 5. Where possible, monitor the number of errors due to ciphertexts originating from the same party, - and slow down the rate of the requests from such party (or even blacklist it altogether). - - **If you are designing a new protocol, consider using the more robust PKCS#1 OAEP.** - - .. 
__: http://www.bell-labs.com/user/bleichen/papers/pkcs.ps - - """ - - # TODO: Verify the key is RSA - - # See 7.2.1 in RFC3447 - modBits = Crypto.Util.number.size(self._key.n) - k = ceil_div(modBits,8) # Convert from bits to bytes - - # Step 1 - if len(ct) != k: - raise ValueError("Ciphertext with incorrect length.") - # Step 2a (O2SIP), 2b (RSADP), and part of 2c (I2OSP) - m = self._key.decrypt(ct) - # Complete step 2c (I2OSP) - em = bchr(0x00)*(k-len(m)) + m - # Step 3 - sep = em.find(bchr(0x00),2) - if not em.startswith(b('\x00\x02')) or sep<10: - return sentinel - # Step 4 - return em[sep+1:] - -def new(key): - """Return a cipher object `PKCS115_Cipher` that can be used to perform PKCS#1 v1.5 encryption or decryption. - - :Parameters: - key : RSA key object - The key to use to encrypt or decrypt the message. This is a `Crypto.PublicKey.RSA` object. - Decryption is only possible if *key* is a private RSA key. - - """ - return PKCS115_Cipher(key) - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/XOR.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/XOR.py deleted file mode 100644 index 46b8464..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/XOR.py +++ /dev/null @@ -1,86 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/XOR.py : XOR -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== -"""XOR toy cipher - -XOR is one the simplest stream ciphers. Encryption and decryption are -performed by XOR-ing data with a keystream made by contatenating -the key. - -Do not use it for real applications! - -:undocumented: __revision__, __package__ -""" - -__revision__ = "$Id$" - -from Crypto.Cipher import _XOR - -class XORCipher: - """XOR cipher object""" - - def __init__(self, key, *args, **kwargs): - """Initialize a XOR cipher object - - See also `new()` at the module level.""" - self._cipher = _XOR.new(key, *args, **kwargs) - self.block_size = self._cipher.block_size - self.key_size = self._cipher.key_size - - def encrypt(self, plaintext): - """Encrypt a piece of data. - - :Parameters: - plaintext : byte string - The piece of data to encrypt. It can be of any size. - :Return: the encrypted data (byte string, as long as the - plaintext). - """ - return self._cipher.encrypt(plaintext) - - def decrypt(self, ciphertext): - """Decrypt a piece of data. - - :Parameters: - ciphertext : byte string - The piece of data to decrypt. It can be of any size. - :Return: the decrypted data (byte string, as long as the - ciphertext). 
- """ - return self._cipher.decrypt(ciphertext) - -def new(key, *args, **kwargs): - """Create a new XOR cipher - - :Parameters: - key : byte string - The secret key to use in the symmetric cipher. - Its length may vary from 1 to 32 bytes. - - :Return: an `XORCipher` object - """ - return XORCipher(key, *args, **kwargs) - -#: Size of a data block (in bytes) -block_size = 1 -#: Size of a key (in bytes) -key_size = range(1,32+1) - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_AES.cpython-37m-x86_64-linux-gnu.so b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_AES.cpython-37m-x86_64-linux-gnu.so deleted file mode 100755 index dbcdb36..0000000 Binary files a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_AES.cpython-37m-x86_64-linux-gnu.so and /dev/null differ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_ARC2.cpython-37m-x86_64-linux-gnu.so b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_ARC2.cpython-37m-x86_64-linux-gnu.so deleted file mode 100755 index b249a1f..0000000 Binary files a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_ARC2.cpython-37m-x86_64-linux-gnu.so and /dev/null differ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_ARC4.cpython-37m-x86_64-linux-gnu.so b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_ARC4.cpython-37m-x86_64-linux-gnu.so deleted file mode 100755 index e9297fb..0000000 Binary files a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_ARC4.cpython-37m-x86_64-linux-gnu.so and /dev/null differ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_Blowfish.cpython-37m-x86_64-linux-gnu.so b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_Blowfish.cpython-37m-x86_64-linux-gnu.so deleted file mode 100755 index 389dbc2..0000000 Binary files a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_Blowfish.cpython-37m-x86_64-linux-gnu.so and /dev/null differ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_CAST.cpython-37m-x86_64-linux-gnu.so b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_CAST.cpython-37m-x86_64-linux-gnu.so deleted file mode 100755 index 48b6287..0000000 Binary files a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_CAST.cpython-37m-x86_64-linux-gnu.so and /dev/null differ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_DES.cpython-37m-x86_64-linux-gnu.so b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_DES.cpython-37m-x86_64-linux-gnu.so deleted file mode 100755 index 4311a33..0000000 Binary files a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_DES.cpython-37m-x86_64-linux-gnu.so and /dev/null differ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_DES3.cpython-37m-x86_64-linux-gnu.so b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_DES3.cpython-37m-x86_64-linux-gnu.so deleted file mode 100755 index eff42e9..0000000 Binary files a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_DES3.cpython-37m-x86_64-linux-gnu.so and /dev/null differ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_XOR.cpython-37m-x86_64-linux-gnu.so b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_XOR.cpython-37m-x86_64-linux-gnu.so deleted file mode 100755 index 38913d7..0000000 Binary files a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/_XOR.cpython-37m-x86_64-linux-gnu.so and /dev/null differ 
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/__init__.py deleted file mode 100644 index 7afed2d..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/__init__.py +++ /dev/null @@ -1,83 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Symmetric- and asymmetric-key encryption algorithms. - -Encryption algorithms transform plaintext in some way that -is dependent on a key or key pair, producing ciphertext. - -Symmetric algorithms --------------------- - -Encryption can easily be reversed, if (and, hopefully, only if) -one knows the same key. -In other words, sender and receiver share the same key. - -The symmetric encryption modules here all support the interface described in PEP -272, "API for Block Encryption Algorithms". - -If you don't know which algorithm to choose, use AES because it's -standard and has undergone a fair bit of examination. - -======================== ======= ======================== -Module name Type Description -======================== ======= ======================== -`Crypto.Cipher.AES` Block Advanced Encryption Standard -`Crypto.Cipher.ARC2` Block Alleged RC2 -`Crypto.Cipher.ARC4` Stream Alleged RC4 -`Crypto.Cipher.Blowfish` Block Blowfish -`Crypto.Cipher.CAST` Block CAST -`Crypto.Cipher.DES` Block The Data Encryption Standard. - Very commonly used in the past, - but today its 56-bit keys are too small. -`Crypto.Cipher.DES3` Block Triple DES. -`Crypto.Cipher.XOR` Stream The simple XOR cipher. -======================== ======= ======================== - - -Asymmetric algorithms ---------------------- - -For asymmetric algorithms, the key to be used for decryption is totally -different and cannot be derived in a feasible way from the key used -for encryption. Put differently, sender and receiver each own one half -of a key pair. The encryption key is often called ``public`` whereas -the decryption key is called ``private``. 
- -========================== ======================= -Module name Description -========================== ======================= -`Crypto.Cipher.PKCS1_v1_5` PKCS#1 v1.5 encryption, based on RSA key pairs -`Crypto.Cipher.PKCS1_OAEP` PKCS#1 OAEP encryption, based on RSA key pairs -========================== ======================= - -:undocumented: __revision__, __package__, _AES, _ARC2, _ARC4, _Blowfish - _CAST, _DES, _DES3, _XOR -""" - -__all__ = ['AES', 'ARC2', 'ARC4', - 'Blowfish', 'CAST', 'DES', 'DES3', - 'XOR', - 'PKCS1_v1_5', 'PKCS1_OAEP' - ] - -__revision__ = "$Id$" - - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/blockalgo.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/blockalgo.py deleted file mode 100644 index dd183dc..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Cipher/blockalgo.py +++ /dev/null @@ -1,296 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Cipher/blockalgo.py -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== -"""Module with definitions common to all block ciphers.""" - -import sys -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * -from Crypto.Util.py3compat import * - -#: *Electronic Code Book (ECB)*. -#: This is the simplest encryption mode. Each of the plaintext blocks -#: is directly encrypted into a ciphertext block, independently of -#: any other block. This mode exposes frequency of symbols -#: in your plaintext. Other modes (e.g. *CBC*) should be used instead. -#: -#: See `NIST SP800-38A`_ , Section 6.1 . -#: -#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf -MODE_ECB = 1 - -#: *Cipher-Block Chaining (CBC)*. Each of the ciphertext blocks depends -#: on the current and all previous plaintext blocks. An Initialization Vector -#: (*IV*) is required. -#: -#: The *IV* is a data block to be transmitted to the receiver. -#: The *IV* can be made public, but it must be authenticated by the receiver and -#: it should be picked randomly. -#: -#: See `NIST SP800-38A`_ , Section 6.2 . -#: -#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf -MODE_CBC = 2 - -#: *Cipher FeedBack (CFB)*. This mode is similar to CBC, but it transforms -#: the underlying block cipher into a stream cipher. Plaintext and ciphertext -#: are processed in *segments* of **s** bits. The mode is therefore sometimes -#: labelled **s**-bit CFB. An Initialization Vector (*IV*) is required. -#: -#: When encrypting, each ciphertext segment contributes to the encryption of -#: the next plaintext segment. 
-#: -#: This *IV* is a data block to be transmitted to the receiver. -#: The *IV* can be made public, but it should be picked randomly. -#: Reusing the same *IV* for encryptions done with the same key lead to -#: catastrophic cryptographic failures. -#: -#: See `NIST SP800-38A`_ , Section 6.3 . -#: -#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf -MODE_CFB = 3 - -#: This mode should not be used. -MODE_PGP = 4 - -#: *Output FeedBack (OFB)*. This mode is very similar to CBC, but it -#: transforms the underlying block cipher into a stream cipher. -#: The keystream is the iterated block encryption of an Initialization Vector (*IV*). -#: -#: The *IV* is a data block to be transmitted to the receiver. -#: The *IV* can be made public, but it should be picked randomly. -#: -#: Reusing the same *IV* for encryptions done with the same key lead to -#: catastrophic cryptograhic failures. -#: -#: See `NIST SP800-38A`_ , Section 6.4 . -#: -#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf -MODE_OFB = 5 - -#: *CounTeR (CTR)*. This mode is very similar to ECB, in that -#: encryption of one block is done independently of all other blocks. -#: Unlike ECB, the block *position* contributes to the encryption and no -#: information leaks about symbol frequency. -#: -#: Each message block is associated to a *counter* which must be unique -#: across all messages that get encrypted with the same key (not just within -#: the same message). The counter is as big as the block size. -#: -#: Counters can be generated in several ways. The most straightword one is -#: to choose an *initial counter block* (which can be made public, similarly -#: to the *IV* for the other modes) and increment its lowest **m** bits by -#: one (modulo *2^m*) for each block. In most cases, **m** is chosen to be half -#: the block size. -#: -#: Reusing the same *initial counter block* for encryptions done with the same -#: key lead to catastrophic cryptograhic failures. -#: -#: See `NIST SP800-38A`_ , Section 6.5 (for the mode) and Appendix B (for how -#: to manage the *initial counter block*). -#: -#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf -MODE_CTR = 6 - -#: OpenPGP. This mode is a variant of CFB, and it is only used in PGP and OpenPGP_ applications. -#: An Initialization Vector (*IV*) is required. -#: -#: Unlike CFB, the IV is not transmitted to the receiver. Instead, the *encrypted* IV is. -#: The IV is a random data block. Two of its bytes are duplicated to act as a checksum -#: for the correctness of the key. The encrypted IV is therefore 2 bytes longer than -#: the clean IV. -#: -#: .. _OpenPGP: http://tools.ietf.org/html/rfc4880 -MODE_OPENPGP = 7 - -def _getParameter(name, index, args, kwargs, default=None): - """Find a parameter in tuple and dictionary arguments a function receives""" - param = kwargs.get(name) - if len(args)>index: - if param: - raise ValueError("Parameter '%s' is specified twice" % name) - param = args[index] - return param or default - -class BlockAlgo: - """Class modelling an abstract block cipher.""" - - def __init__(self, factory, key, *args, **kwargs): - self.mode = _getParameter('mode', 0, args, kwargs, default=MODE_ECB) - self.block_size = factory.block_size - - if self.mode != MODE_OPENPGP: - self._cipher = factory.new(key, *args, **kwargs) - self.IV = self._cipher.IV - else: - # OPENPGP mode. For details, see 13.9 in RCC4880. 
- # - # A few members are specifically created for this mode: - # - _encrypted_iv, set in this constructor - # - _done_first_block, set to True after the first encryption - # - _done_last_block, set to True after a partial block is processed - - self._done_first_block = False - self._done_last_block = False - self.IV = _getParameter('iv', 1, args, kwargs) - if not self.IV: - raise ValueError("MODE_OPENPGP requires an IV") - - # Instantiate a temporary cipher to process the IV - IV_cipher = factory.new(key, MODE_CFB, - b('\x00')*self.block_size, # IV for CFB - segment_size=self.block_size*8) - - # The cipher will be used for... - if len(self.IV) == self.block_size: - # ... encryption - self._encrypted_IV = IV_cipher.encrypt( - self.IV + self.IV[-2:] + # Plaintext - b('\x00')*(self.block_size-2) # Padding - )[:self.block_size+2] - elif len(self.IV) == self.block_size+2: - # ... decryption - self._encrypted_IV = self.IV - self.IV = IV_cipher.decrypt(self.IV + # Ciphertext - b('\x00')*(self.block_size-2) # Padding - )[:self.block_size+2] - if self.IV[-2:] != self.IV[-4:-2]: - raise ValueError("Failed integrity check for OPENPGP IV") - self.IV = self.IV[:-2] - else: - raise ValueError("Length of IV must be %d or %d bytes for MODE_OPENPGP" - % (self.block_size, self.block_size+2)) - - # Instantiate the cipher for the real PGP data - self._cipher = factory.new(key, MODE_CFB, - self._encrypted_IV[-self.block_size:], - segment_size=self.block_size*8) - - def encrypt(self, plaintext): - """Encrypt data with the key and the parameters set at initialization. - - The cipher object is stateful; encryption of a long block - of data can be broken up in two or more calls to `encrypt()`. - That is, the statement: - - >>> c.encrypt(a) + c.encrypt(b) - - is always equivalent to: - - >>> c.encrypt(a+b) - - That also means that you cannot reuse an object for encrypting - or decrypting other data with the same key. - - This function does not perform any padding. - - - For `MODE_ECB`, `MODE_CBC`, and `MODE_OFB`, *plaintext* length - (in bytes) must be a multiple of *block_size*. - - - For `MODE_CFB`, *plaintext* length (in bytes) must be a multiple - of *segment_size*/8. - - - For `MODE_CTR`, *plaintext* can be of any length. - - - For `MODE_OPENPGP`, *plaintext* must be a multiple of *block_size*, - unless it is the last chunk of the message. - - :Parameters: - plaintext : byte string - The piece of data to encrypt. - :Return: - the encrypted data, as a byte string. It is as long as - *plaintext* with one exception: when encrypting the first message - chunk with `MODE_OPENPGP`, the encypted IV is prepended to the - returned ciphertext. - """ - - if self.mode == MODE_OPENPGP: - padding_length = (self.block_size - len(plaintext) % self.block_size) % self.block_size - if padding_length>0: - # CFB mode requires ciphertext to have length multiple of block size, - # but PGP mode allows the last block to be shorter - if self._done_last_block: - raise ValueError("Only the last chunk is allowed to have length not multiple of %d bytes", - self.block_size) - self._done_last_block = True - padded = plaintext + b('\x00')*padding_length - res = self._cipher.encrypt(padded)[:len(plaintext)] - else: - res = self._cipher.encrypt(plaintext) - if not self._done_first_block: - res = self._encrypted_IV + res - self._done_first_block = True - return res - - return self._cipher.encrypt(plaintext) - - def decrypt(self, ciphertext): - """Decrypt data with the key and the parameters set at initialization. 
- - The cipher object is stateful; decryption of a long block - of data can be broken up in two or more calls to `decrypt()`. - That is, the statement: - - >>> c.decrypt(a) + c.decrypt(b) - - is always equivalent to: - - >>> c.decrypt(a+b) - - That also means that you cannot reuse an object for encrypting - or decrypting other data with the same key. - - This function does not perform any padding. - - - For `MODE_ECB`, `MODE_CBC`, and `MODE_OFB`, *ciphertext* length - (in bytes) must be a multiple of *block_size*. - - - For `MODE_CFB`, *ciphertext* length (in bytes) must be a multiple - of *segment_size*/8. - - - For `MODE_CTR`, *ciphertext* can be of any length. - - - For `MODE_OPENPGP`, *plaintext* must be a multiple of *block_size*, - unless it is the last chunk of the message. - - :Parameters: - ciphertext : byte string - The piece of data to decrypt. - :Return: the decrypted data (byte string, as long as *ciphertext*). - """ - if self.mode == MODE_OPENPGP: - padding_length = (self.block_size - len(ciphertext) % self.block_size) % self.block_size - if padding_length>0: - # CFB mode requires ciphertext to have length multiple of block size, - # but PGP mode allows the last block to be shorter - if self._done_last_block: - raise ValueError("Only the last chunk is allowed to have length not multiple of %d bytes", - self.block_size) - self._done_last_block = True - padded = ciphertext + b('\x00')*padding_length - res = self._cipher.decrypt(padded)[:len(ciphertext)] - else: - res = self._cipher.decrypt(ciphertext) - return res - - return self._cipher.decrypt(ciphertext) - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/HMAC.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/HMAC.py deleted file mode 100644 index 324f534..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/HMAC.py +++ /dev/null @@ -1,212 +0,0 @@ -# HMAC.py - Implements the HMAC algorithm as described by RFC 2104. -# -# =================================================================== -# Portions Copyright (c) 2001, 2002, 2003 Python Software Foundation; -# All Rights Reserved -# -# This file contains code from the Python 2.2 hmac.py module (the -# "Original Code"), with modifications made after it was incorporated -# into PyCrypto (the "Modifications"). -# -# To the best of our knowledge, the Python Software Foundation is the -# copyright holder of the Original Code, and has licensed it under the -# Python 2.2 license. See the file LEGAL/copy/LICENSE.python-2.2 for -# details. -# -# The Modifications to this file are dedicated to the public domain. -# To the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. No rights are -# reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - - -"""HMAC (Hash-based Message Authentication Code) algorithm - -HMAC is a MAC defined in RFC2104_ and FIPS-198_ and constructed using -a cryptograpic hash algorithm. -It is usually named *HMAC-X*, where *X* is the hash algorithm; for -instance *HMAC-SHA1* or *HMAC-MD5*. - -The strength of an HMAC depends on: - - - the strength of the hash algorithm - - the length and entropy of the secret key - -An example of possible usage is the following: - - >>> from Crypto.Hash import HMAC - >>> - >>> secret = b'Swordfish' - >>> h = HMAC.new(secret) - >>> h.update(b'Hello') - >>> print h.hexdigest() - -.. _RFC2104: http://www.ietf.org/rfc/rfc2104.txt -.. _FIPS-198: http://csrc.nist.gov/publications/fips/fips198/fips-198a.pdf -""" - -# This is just a copy of the Python 2.2 HMAC module, modified to work when -# used on versions of Python before 2.2. - -__revision__ = "$Id$" - -__all__ = ['new', 'digest_size', 'HMAC' ] - -from Crypto.Util.strxor import strxor_c -from Crypto.Util.py3compat import * - -#: The size of the authentication tag produced by the MAC. -#: It matches the digest size on the underlying -#: hashing module used. -digest_size = None - -class HMAC: - """Class that implements HMAC""" - - #: The size of the authentication tag produced by the MAC. - #: It matches the digest size on the underlying - #: hashing module used. - digest_size = None - - def __init__(self, key, msg = None, digestmod = None): - """Create a new HMAC object. - - :Parameters: - key : byte string - secret key for the MAC object. - It must be long enough to match the expected security level of the - MAC. However, there is no benefit in using keys longer than the - `digest_size` of the underlying hash algorithm. - msg : byte string - The very first chunk of the message to authenticate. - It is equivalent to an early call to `update()`. Optional. - :Parameter digestmod: - The hash algorithm the HMAC is based on. - Default is `Crypto.Hash.MD5`. - :Type digestmod: - A hash module or object instantiated from `Crypto.Hash` - """ - if digestmod is None: - from . import MD5 - digestmod = MD5 - - self.digestmod = digestmod - self.outer = digestmod.new() - self.inner = digestmod.new() - try: - self.digest_size = digestmod.digest_size - except AttributeError: - self.digest_size = len(self.outer.digest()) - - try: - # The block size is 128 bytes for SHA384 and SHA512 and 64 bytes - # for the others hash function - blocksize = digestmod.block_size - except AttributeError: - blocksize = 64 - - ipad = 0x36 - opad = 0x5C - - if len(key) > blocksize: - key = digestmod.new(key).digest() - - key = key + bchr(0) * (blocksize - len(key)) - self.outer.update(strxor_c(key, opad)) - self.inner.update(strxor_c(key, ipad)) - if (msg): - self.update(msg) - - def update(self, msg): - """Continue authentication of a message by consuming the next chunk of data. - - Repeated calls are equivalent to a single call with the concatenation - of all the arguments. In other words: - - >>> m.update(a); m.update(b) - - is equivalent to: - - >>> m.update(a+b) - - :Parameters: - msg : byte string - The next chunk of the message being authenticated - """ - - self.inner.update(msg) - - def copy(self): - """Return a copy ("clone") of the MAC object. - - The copy will have the same internal state as the original MAC - object. - This can be used to efficiently compute the MAC of strings that - share a common initial substring. 
- - :Returns: An `HMAC` object - """ - other = HMAC(b("")) - other.digestmod = self.digestmod - other.inner = self.inner.copy() - other.outer = self.outer.copy() - return other - - def digest(self): - """Return the **binary** (non-printable) MAC of the message that has - been authenticated so far. - - This method does not change the state of the MAC object. - You can continue updating the object after calling this function. - - :Return: A byte string of `digest_size` bytes. It may contain non-ASCII - characters, including null bytes. - """ - h = self.outer.copy() - h.update(self.inner.digest()) - return h.digest() - - def hexdigest(self): - """Return the **printable** MAC of the message that has been - authenticated so far. - - This method does not change the state of the MAC object. - - :Return: A string of 2* `digest_size` bytes. It contains only - hexadecimal ASCII digits. - """ - return "".join(["%02x" % bord(x) - for x in tuple(self.digest())]) - -def new(key, msg = None, digestmod = None): - """Create a new HMAC object. - - :Parameters: - key : byte string - key for the MAC object. - It must be long enough to match the expected security level of the - MAC. However, there is no benefit in using keys longer than the - `digest_size` of the underlying hash algorithm. - msg : byte string - The very first chunk of the message to authenticate. - It is equivalent to an early call to `HMAC.update()`. - Optional. - :Parameter digestmod: - The hash to use to implement the HMAC. Default is `Crypto.Hash.MD5`. - :Type digestmod: - A hash module or instantiated object from `Crypto.Hash` - :Returns: An `HMAC` object - """ - return HMAC(key, msg, digestmod) - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/MD2.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/MD2.py deleted file mode 100644 index dac959e..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/MD2.py +++ /dev/null @@ -1,91 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""MD2 cryptographic hash algorithm. - -MD2 is specified in RFC1319_ and it produces the 128 bit digest of a message. - - >>> from Crypto.Hash import MD2 - >>> - >>> h = MD2.new() - >>> h.update(b'Hello') - >>> print h.hexdigest() - -MD2 stand for Message Digest version 2, and it was invented by Rivest in 1989. - -This algorithm is both slow and insecure. Do not use it for new designs. - -.. 
_RFC1319: http://tools.ietf.org/html/rfc1319 -""" - -_revision__ = "$Id$" - -__all__ = ['new', 'digest_size', 'MD2Hash' ] - -from Crypto.Util.py3compat import * -from Crypto.Hash.hashalgo import HashAlgo - -import Crypto.Hash._MD2 as _MD2 -hashFactory = _MD2 - -class MD2Hash(HashAlgo): - """Class that implements an MD2 hash - - :undocumented: block_size - """ - - #: ASN.1 Object identifier (OID):: - #: - #: id-md2 OBJECT IDENTIFIER ::= { - #: iso(1) member-body(2) us(840) rsadsi(113549) - #: digestAlgorithm(2) 2 - #: } - #: - #: This value uniquely identifies the MD2 algorithm. - oid = b('\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x02') - - digest_size = 16 - block_size = 16 - - def __init__(self, data=None): - HashAlgo.__init__(self, hashFactory, data) - - def new(self, data=None): - return MD2Hash(data) - -def new(data=None): - """Return a fresh instance of the hash object. - - :Parameters: - data : byte string - The very first chunk of the message to hash. - It is equivalent to an early call to `MD2Hash.update()`. - Optional. - - :Return: An `MD2Hash` object - """ - return MD2Hash().new(data) - -#: The size of the resulting hash in bytes. -digest_size = MD2Hash.digest_size - -#: The internal block size of the hash algorithm in bytes. -block_size = MD2Hash.block_size - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/MD4.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/MD4.py deleted file mode 100644 index e28a201..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/MD4.py +++ /dev/null @@ -1,91 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""MD4 cryptographic hash algorithm. - -MD4 is specified in RFC1320_ and produces the 128 bit digest of a message. - - >>> from Crypto.Hash import MD4 - >>> - >>> h = MD4.new() - >>> h.update(b'Hello') - >>> print h.hexdigest() - -MD4 stand for Message Digest version 4, and it was invented by Rivest in 1990. - -This algorithm is insecure. Do not use it for new designs. - -.. _RFC1320: http://tools.ietf.org/html/rfc1320 -""" - -_revision__ = "$Id$" - -__all__ = ['new', 'digest_size', 'MD4Hash' ] - -from Crypto.Util.py3compat import * -from Crypto.Hash.hashalgo import HashAlgo - -import Crypto.Hash._MD4 as _MD4 -hashFactory = _MD4 - -class MD4Hash(HashAlgo): - """Class that implements an MD4 hash - - :undocumented: block_size - """ - - #: ASN.1 Object identifier (OID):: - #: - #: id-md2 OBJECT IDENTIFIER ::= { - #: iso(1) member-body(2) us(840) rsadsi(113549) - #: digestAlgorithm(2) 4 - #: } - #: - #: This value uniquely identifies the MD4 algorithm. 
- oid = b('\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x04') - - digest_size = 16 - block_size = 64 - - def __init__(self, data=None): - HashAlgo.__init__(self, hashFactory, data) - - def new(self, data=None): - return MD4Hash(data) - -def new(data=None): - """Return a fresh instance of the hash object. - - :Parameters: - data : byte string - The very first chunk of the message to hash. - It is equivalent to an early call to `MD4Hash.update()`. - Optional. - - :Return: A `MD4Hash` object - """ - return MD4Hash().new(data) - -#: The size of the resulting hash in bytes. -digest_size = MD4Hash.digest_size - -#: The internal block size of the hash algorithm in bytes. -block_size = MD4Hash.block_size - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/MD5.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/MD5.py deleted file mode 100644 index 18e9e7b..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/MD5.py +++ /dev/null @@ -1,97 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""MD5 cryptographic hash algorithm. - -MD5 is specified in RFC1321_ and produces the 128 bit digest of a message. - - >>> from Crypto.Hash import MD5 - >>> - >>> h = MD5.new() - >>> h.update(b'Hello') - >>> print h.hexdigest() - -MD5 stand for Message Digest version 5, and it was invented by Rivest in 1991. - -This algorithm is insecure. Do not use it for new designs. - -.. _RFC1321: http://tools.ietf.org/html/rfc1321 -""" - -_revision__ = "$Id$" - -__all__ = ['new', 'digest_size', 'MD5Hash' ] - -from Crypto.Util.py3compat import * -from Crypto.Hash.hashalgo import HashAlgo - -try: - # The md5 module is deprecated in Python 2.6, so use hashlib when possible. - import hashlib - hashFactory = hashlib.md5 - -except ImportError: - import md5 - hashFactory = md5 - -class MD5Hash(HashAlgo): - """Class that implements an MD5 hash - - :undocumented: block_size - """ - - #: ASN.1 Object identifier (OID):: - #: - #: id-md5 OBJECT IDENTIFIER ::= { - #: iso(1) member-body(2) us(840) rsadsi(113549) - #: digestAlgorithm(2) 5 - #: } - #: - #: This value uniquely identifies the MD5 algorithm. - oid = b('\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x05') - - digest_size = 16 - block_size = 64 - - def __init__(self, data=None): - HashAlgo.__init__(self, hashFactory, data) - - def new(self, data=None): - return MD5Hash(data) - -def new(data=None): - """Return a fresh instance of the hash object. - - :Parameters: - data : byte string - The very first chunk of the message to hash. - It is equivalent to an early call to `MD5Hash.update()`. - Optional. 
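The oid constants attached to these hash classes are DER-encoded ASN.1 OBJECT IDENTIFIERs. As a rough illustration (the decoder below is a hypothetical helper, not part of the removed package, and it only handles the short single-value form these constants use), the bytes unpack into the familiar dotted notation:

    def decode_oid(der: bytes) -> str:
        # Minimal DER OBJECT IDENTIFIER decoder: tag byte, length byte, then
        # base-128 arcs with the high bit marking continuation.
        assert der[0] == 0x06                    # OBJECT IDENTIFIER tag
        body = der[2:2 + der[1]]
        arcs = [body[0] // 40, body[0] % 40]
        value = 0
        for byte in body[1:]:
            value = (value << 7) | (byte & 0x7F)
            if not byte & 0x80:                  # high bit clear closes this arc
                arcs.append(value)
                value = 0
        return ".".join(map(str, arcs))

    print(decode_oid(b'\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x02'))  # 1.2.840.113549.2.2 (id-md2)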
- - :Return: A `MD5Hash` object - """ - return MD5Hash().new(data) - -#: The size of the resulting hash in bytes. -digest_size = MD5Hash.digest_size - -#: The internal block size of the hash algorithm in bytes. -block_size = MD5Hash.block_size - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/RIPEMD.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/RIPEMD.py deleted file mode 100644 index 33099cb..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/RIPEMD.py +++ /dev/null @@ -1,94 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""RIPEMD-160 cryptographic hash algorithm. - -RIPEMD-160_ produces the 160 bit digest of a message. - - >>> from Crypto.Hash import RIPEMD - >>> - >>> h = RIPEMD.new() - >>> h.update(b'Hello') - >>> print h.hexdigest() - -RIPEMD-160 stands for RACE Integrity Primitives Evaluation Message Digest -with a 160 bit digest. It was invented by Dobbertin, Bosselaers, and Preneel. - -This algorithm is considered secure, although it has not been scrutinized as -extensively as SHA-1. Moreover, it provides an informal security level of just -80bits. - -.. _RIPEMD-160: http://homes.esat.kuleuven.be/~bosselae/ripemd160.html -""" - -_revision__ = "$Id$" - -__all__ = ['new', 'digest_size', 'RIPEMD160Hash' ] - -from Crypto.Util.py3compat import * -from Crypto.Hash.hashalgo import HashAlgo - -import Crypto.Hash._RIPEMD160 as _RIPEMD160 -hashFactory = _RIPEMD160 - -class RIPEMD160Hash(HashAlgo): - """Class that implements a RIPMD-160 hash - - :undocumented: block_size - """ - - #: ASN.1 Object identifier (OID):: - #: - #: id-ripemd160 OBJECT IDENTIFIER ::= { - #: iso(1) identified-organization(3) teletrust(36) - #: algorithm(3) hashAlgorithm(2) ripemd160(1) - #: } - #: - #: This value uniquely identifies the RIPMD-160 algorithm. - oid = b("\x06\x05\x2b\x24\x03\x02\x01") - - digest_size = 20 - block_size = 64 - - def __init__(self, data=None): - HashAlgo.__init__(self, hashFactory, data) - - def new(self, data=None): - return RIPEMD160Hash(data) - -def new(data=None): - """Return a fresh instance of the hash object. - - :Parameters: - data : byte string - The very first chunk of the message to hash. - It is equivalent to an early call to `RIPEMD160Hash.update()`. - Optional. - - :Return: A `RIPEMD160Hash` object - """ - return RIPEMD160Hash().new(data) - -#: The size of the resulting hash in bytes. -digest_size = RIPEMD160Hash.digest_size - -#: The internal block size of the hash algorithm in bytes. 
-block_size = RIPEMD160Hash.block_size - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/SHA.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/SHA.py deleted file mode 100644 index 0bc5917..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/SHA.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""SHA-1 cryptographic hash algorithm. - -SHA-1_ produces the 160 bit digest of a message. - - >>> from Crypto.Hash import SHA - >>> - >>> h = SHA.new() - >>> h.update(b'Hello') - >>> print h.hexdigest() - -*SHA* stands for Secure Hash Algorithm. - -This algorithm is not considered secure. Do not use it for new designs. - -.. _SHA-1: http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf -""" - -_revision__ = "$Id$" - -__all__ = ['new', 'digest_size', 'SHA1Hash' ] - -from Crypto.Util.py3compat import * -from Crypto.Hash.hashalgo import HashAlgo - -try: - # The sha module is deprecated in Python 2.6, so use hashlib when possible. - import hashlib - hashFactory = hashlib.sha1 - -except ImportError: - import sha - hashFactory = sha - -class SHA1Hash(HashAlgo): - """Class that implements a SHA-1 hash - - :undocumented: block_size - """ - - #: ASN.1 Object identifier (OID):: - #: - #: id-sha1 OBJECT IDENTIFIER ::= { - #: iso(1) identified-organization(3) oiw(14) secsig(3) - #: algorithms(2) 26 - #: } - #: - #: This value uniquely identifies the SHA-1 algorithm. - oid = b('\x06\x05\x2b\x0e\x03\x02\x1a') - - digest_size = 20 - block_size = 64 - - def __init__(self, data=None): - HashAlgo.__init__(self, hashFactory, data) - - def new(self, data=None): - return SHA1Hash(data) - -def new(data=None): - """Return a fresh instance of the hash object. - - :Parameters: - data : byte string - The very first chunk of the message to hash. - It is equivalent to an early call to `SHA1Hash.update()`. - Optional. - - :Return: A `SHA1Hash` object - """ - return SHA1Hash().new(data) - -#: The size of the resulting hash in bytes. -digest_size = SHA1Hash.digest_size - -#: The internal block size of the hash algorithm in bytes. 
-block_size = SHA1Hash.block_size - - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/SHA224.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/SHA224.py deleted file mode 100644 index 959b56d..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/SHA224.py +++ /dev/null @@ -1,95 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""SHA-224 cryptographic hash algorithm. - -SHA-224 belongs to the SHA-2_ family of cryptographic hashes. -It produces the 224 bit digest of a message. - - >>> from Crypto.Hash import SHA224 - >>> - >>> h = SHA224.new() - >>> h.update(b'Hello') - >>> print h.hexdigest() - -*SHA* stands for Secure Hash Algorithm. - -.. _SHA-2: http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf -""" - -_revision__ = "$Id$" - -__all__ = ['new', 'digest_size', 'SHA224Hash' ] - -from Crypto.Util.py3compat import * -from Crypto.Hash.hashalgo import HashAlgo - -try: - import hashlib - hashFactory = hashlib.sha224 - -except ImportError: - from Crypto.Hash import _SHA224 - hashFactory = _SHA224 - -class SHA224Hash(HashAlgo): - """Class that implements a SHA-224 hash - - :undocumented: block_size - """ - - #: ASN.1 Object identifier (OID):: - #: - #: id-sha224 OBJECT IDENTIFIER ::= { - #: joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101) csor(3) - #: nistalgorithm(4) hashalgs(2) 4 - #: } - #: - #: This value uniquely identifies the SHA-224 algorithm. - oid = b('\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x04') - - digest_size = 28 - block_size = 64 - - def __init__(self, data=None): - HashAlgo.__init__(self, hashFactory, data) - - def new(self, data=None): - return SHA224Hash(data) - -def new(data=None): - """Return a fresh instance of the hash object. - - :Parameters: - data : byte string - The very first chunk of the message to hash. - It is equivalent to an early call to `SHA224Hash.update()`. - Optional. - - :Return: A `SHA224Hash` object - """ - return SHA224Hash().new(data) - -#: The size of the resulting hash in bytes. -digest_size = SHA224Hash.digest_size - -#: The internal block size of the hash algorithm in bytes. 
-block_size = SHA224Hash.block_size - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/SHA256.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/SHA256.py deleted file mode 100644 index b0a99b3..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/SHA256.py +++ /dev/null @@ -1,95 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""SHA-256 cryptographic hash algorithm. - -SHA-256 belongs to the SHA-2_ family of cryptographic hashes. -It produces the 256 bit digest of a message. - - >>> from Crypto.Hash import SHA256 - >>> - >>> h = SHA256.new() - >>> h.update(b'Hello') - >>> print h.hexdigest() - -*SHA* stands for Secure Hash Algorithm. - -.. _SHA-2: http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf -""" - -_revision__ = "$Id$" - -__all__ = ['new', 'digest_size', 'SHA256Hash' ] - -from Crypto.Util.py3compat import * -from Crypto.Hash.hashalgo import HashAlgo - -try: - import hashlib - hashFactory = hashlib.sha256 - -except ImportError: - from Crypto.Hash import _SHA256 - hashFactory = _SHA256 - -class SHA256Hash(HashAlgo): - """Class that implements a SHA-256 hash - - :undocumented: block_size - """ - - #: ASN.1 Object identifier (OID):: - #: - #: id-sha256 OBJECT IDENTIFIER ::= { - #: joint-iso-itu-t(2) country(16) us(840) organization(1) - #: gov(101) csor(3) nistalgorithm(4) hashalgs(2) 1 - #: } - #: - #: This value uniquely identifies the SHA-256 algorithm. - oid = b('\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x01') - - digest_size = 32 - block_size = 64 - - def __init__(self, data=None): - HashAlgo.__init__(self, hashFactory, data) - - def new(self, data=None): - return SHA256Hash(data) - -def new(data=None): - """Return a fresh instance of the hash object. - - :Parameters: - data : byte string - The very first chunk of the message to hash. - It is equivalent to an early call to `SHA256Hash.update()`. - Optional. - - :Return: A `SHA256Hash` object - """ - return SHA256Hash().new(data) - -#: The size of the resulting hash in bytes. -digest_size = SHA256Hash.digest_size - -#: The internal block size of the hash algorithm in bytes. 
-block_size = SHA256Hash.block_size - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/SHA384.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/SHA384.py deleted file mode 100644 index 3490b02..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/SHA384.py +++ /dev/null @@ -1,96 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""SHA-384 cryptographic hash algorithm. - -SHA-384 belongs to the SHA-2_ family of cryptographic hashes. -It produces the 384 bit digest of a message. - - >>> from Crypto.Hash import SHA384 - >>> - >>> h = SHA384.new() - >>> h.update(b'Hello') - >>> print h.hexdigest() - -*SHA* stands for Secure Hash Algorithm. - -.. _SHA-2: http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf -""" - -_revision__ = "$Id$" - -__all__ = ['new', 'digest_size', 'SHA384Hash' ] - -from Crypto.Util.py3compat import * -from Crypto.Hash.hashalgo import HashAlgo - -try: - import hashlib - hashFactory = hashlib.sha384 - -except ImportError: - from Crypto.Hash import _SHA384 - hashFactory = _SHA384 - -class SHA384Hash(HashAlgo): - """Class that implements a SHA-384 hash - - :undocumented: block_size - """ - - #: ASN.1 Object identifier (OID):: - #: - #: id-sha384 OBJECT IDENTIFIER ::= { - #: joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101) csor(3) - #: nistalgorithm(4) hashalgs(2) 2 - #: } - #: - #: This value uniquely identifies the SHA-384 algorithm. - oid = b('\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x02') - - digest_size = 48 - block_size = 128 - - def __init__(self, data=None): - HashAlgo.__init__(self, hashFactory, data) - - def new(self, data=None): - return SHA384Hash(data) - -def new(data=None): - """Return a fresh instance of the hash object. - - :Parameters: - data : byte string - The very first chunk of the message to hash. - It is equivalent to an early call to `SHA384Hash.update()`. - Optional. - - :Return: A `SHA384Hash` object - """ - return SHA384Hash().new(data) - -#: The size of the resulting hash in bytes. -digest_size = SHA384Hash.digest_size - -#: The internal block size of the hash algorithm in bytes. 
-block_size = SHA384Hash.block_size - - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/SHA512.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/SHA512.py deleted file mode 100644 index d57548d..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/SHA512.py +++ /dev/null @@ -1,95 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""SHA-512 cryptographic hash algorithm. - -SHA-512 belongs to the SHA-2_ family of cryptographic hashes. -It produces the 512 bit digest of a message. - - >>> from Crypto.Hash import SHA512 - >>> - >>> h = SHA512.new() - >>> h.update(b'Hello') - >>> print h.hexdigest() - -*SHA* stands for Secure Hash Algorithm. - -.. _SHA-2: http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf -""" - -_revision__ = "$Id$" - -__all__ = ['new', 'digest_size', 'SHA512Hash' ] - -from Crypto.Util.py3compat import * -from Crypto.Hash.hashalgo import HashAlgo - -try: - import hashlib - hashFactory = hashlib.sha512 - -except ImportError: - from Crypto.Hash import _SHA512 - hashFactory = _SHA512 - -class SHA512Hash(HashAlgo): - """Class that implements a SHA-512 hash - - :undocumented: block_size - """ - - #: ASN.1 Object identifier (OID):: - #: - #: id-sha512 OBJECT IDENTIFIER ::= { - #: joint-iso-itu-t(2) - #: country(16) us(840) organization(1) gov(101) csor(3) nistalgorithm(4) hashalgs(2) 3 - #: } - #: - #: This value uniquely identifies the SHA-512 algorithm. - oid = b('\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x03') - - digest_size = 64 - block_size = 128 - - def __init__(self, data=None): - HashAlgo.__init__(self, hashFactory, data) - - def new(self, data=None): - return SHA512Hash(data) - -def new(data=None): - """Return a fresh instance of the hash object. - - :Parameters: - data : byte string - The very first chunk of the message to hash. - It is equivalent to an early call to `SHA512Hash.update()`. - Optional. - - :Return: A `SHA512Hash` object - """ - return SHA512Hash().new(data) - -#: The size of the resulting hash in bytes. -digest_size = SHA512Hash.digest_size - -#: The internal block size of the hash algorithm in bytes. 
-block_size = SHA512Hash.block_size - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_MD2.cpython-37m-x86_64-linux-gnu.so b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_MD2.cpython-37m-x86_64-linux-gnu.so deleted file mode 100755 index e037bb1..0000000 Binary files a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_MD2.cpython-37m-x86_64-linux-gnu.so and /dev/null differ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_MD4.cpython-37m-x86_64-linux-gnu.so b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_MD4.cpython-37m-x86_64-linux-gnu.so deleted file mode 100755 index 5befe08..0000000 Binary files a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_MD4.cpython-37m-x86_64-linux-gnu.so and /dev/null differ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_RIPEMD160.cpython-37m-x86_64-linux-gnu.so b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_RIPEMD160.cpython-37m-x86_64-linux-gnu.so deleted file mode 100755 index c94700a..0000000 Binary files a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_RIPEMD160.cpython-37m-x86_64-linux-gnu.so and /dev/null differ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_SHA224.cpython-37m-x86_64-linux-gnu.so b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_SHA224.cpython-37m-x86_64-linux-gnu.so deleted file mode 100755 index f965807..0000000 Binary files a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_SHA224.cpython-37m-x86_64-linux-gnu.so and /dev/null differ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_SHA256.cpython-37m-x86_64-linux-gnu.so b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_SHA256.cpython-37m-x86_64-linux-gnu.so deleted file mode 100755 index 71e3a7e..0000000 Binary files a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_SHA256.cpython-37m-x86_64-linux-gnu.so and /dev/null differ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_SHA384.cpython-37m-x86_64-linux-gnu.so b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_SHA384.cpython-37m-x86_64-linux-gnu.so deleted file mode 100755 index 0556c4a..0000000 Binary files a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_SHA384.cpython-37m-x86_64-linux-gnu.so and /dev/null differ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_SHA512.cpython-37m-x86_64-linux-gnu.so b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_SHA512.cpython-37m-x86_64-linux-gnu.so deleted file mode 100755 index 006687b..0000000 Binary files a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/_SHA512.cpython-37m-x86_64-linux-gnu.so and /dev/null differ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/__init__.py deleted file mode 100644 index 4582c66..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/__init__.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Hashing algorithms - -Hash functions take arbitrary binary strings as input, and produce a random-like output -of fixed size that is dependent on the input; it should be practically infeasible -to derive the original input data given only the hash function's -output. In other words, the hash function is *one-way*. - -It should also not be practically feasible to find a second piece of data -(a *second pre-image*) whose hash is the same as the original message -(*weak collision resistance*). - -Finally, it should not be feasible to find two arbitrary messages with the -same hash (*strong collision resistance*). - -The output of the hash function is called the *digest* of the input message. -In general, the security of a hash function is related to the length of the -digest. If the digest is *n* bits long, its security level is roughly comparable -to the the one offered by an *n/2* bit encryption algorithm. - -Hash functions can be used simply as a integrity check, or, in -association with a public-key algorithm, can be used to implement -digital signatures. - -The hashing modules here all support the interface described in `PEP -247`_ , "API for Cryptographic Hash Functions". - -.. _`PEP 247` : http://www.python.org/dev/peps/pep-0247/ - -:undocumented: _MD2, _MD4, _RIPEMD160, _SHA224, _SHA256, _SHA384, _SHA512 -""" - -__all__ = ['HMAC', 'MD2', 'MD4', 'MD5', 'RIPEMD', 'SHA', - 'SHA224', 'SHA256', 'SHA384', 'SHA512'] -__revision__ = "$Id$" - - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/hashalgo.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/hashalgo.py deleted file mode 100644 index b38b3a6..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Hash/hashalgo.py +++ /dev/null @@ -1,116 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -from binascii import hexlify - -class HashAlgo: - """A generic class for an abstract cryptographic hash algorithm. - - :undocumented: block_size - """ - - #: The size of the resulting hash in bytes. 
- digest_size = None - #: The internal block size of the hash algorithm in bytes. - block_size = None - - def __init__(self, hashFactory, data=None): - """Initialize the hash object. - - :Parameters: - hashFactory : callable - An object that will generate the actual hash implementation. - *hashFactory* must have a *new()* method, or must be directly - callable. - data : byte string - The very first chunk of the message to hash. - It is equivalent to an early call to `update()`. - """ - if hasattr(hashFactory, 'new'): - self._hash = hashFactory.new() - else: - self._hash = hashFactory() - if data: - self.update(data) - - def update(self, data): - """Continue hashing of a message by consuming the next chunk of data. - - Repeated calls are equivalent to a single call with the concatenation - of all the arguments. In other words: - - >>> m.update(a); m.update(b) - - is equivalent to: - - >>> m.update(a+b) - - :Parameters: - data : byte string - The next chunk of the message being hashed. - """ - return self._hash.update(data) - - def digest(self): - """Return the **binary** (non-printable) digest of the message that has been hashed so far. - - This method does not change the state of the hash object. - You can continue updating the object after calling this function. - - :Return: A byte string of `digest_size` bytes. It may contain non-ASCII - characters, including null bytes. - """ - return self._hash.digest() - - def hexdigest(self): - """Return the **printable** digest of the message that has been hashed so far. - - This method does not change the state of the hash object. - - :Return: A string of 2* `digest_size` characters. It contains only - hexadecimal ASCII digits. - """ - return self._hash.hexdigest() - - def copy(self): - """Return a copy ("clone") of the hash object. - - The copy will have the same internal state as the original hash - object. - This can be used to efficiently compute the digests of strings that - share a common initial substring. - - :Return: A hash object of the same type - """ - return self._hash.copy() - - def new(self, data=None): - """Return a fresh instance of the hash object. - - Unlike the `copy` method, the internal state of the object is empty. - - :Parameters: - data : byte string - The next chunk of the message being hashed. - - :Return: A hash object of the same type - """ - pass - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Protocol/AllOrNothing.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Protocol/AllOrNothing.py deleted file mode 100644 index dd20536..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Protocol/AllOrNothing.py +++ /dev/null @@ -1,320 +0,0 @@ -# -# AllOrNothing.py : all-or-nothing package transformations -# -# Part of the Python Cryptography Toolkit -# -# Written by Andrew M. Kuchling and others -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
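The copy() method documented above is what makes hashing many messages that share a prefix cheap. A small sketch using hashlib.sha256 as a stand-in (the removed HashAlgo wrapper simply delegates to a hashlib-style object, so the pattern is the same):

    import hashlib

    # Hash the shared prefix once, then fork the internal state per suffix
    # instead of re-hashing the prefix every time.
    prefix = hashlib.sha256(b"common header bytes")
    digests = {}
    for suffix in (b"record-1", b"record-2", b"record-3"):
        h = prefix.copy()        # clone the partially-updated state
        h.update(suffix)
        digests[suffix] = h.hexdigest()

    assert digests[b"record-1"] == hashlib.sha256(b"common header bytes" + b"record-1").hexdigest()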
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""This file implements all-or-nothing package transformations. - -An all-or-nothing package transformation is one in which some text is -transformed into message blocks, such that all blocks must be obtained before -the reverse transformation can be applied. Thus, if any blocks are corrupted -or lost, the original message cannot be reproduced. - -An all-or-nothing package transformation is not encryption, although a block -cipher algorithm is used. The encryption key is randomly generated and is -extractable from the message blocks. - -This class implements the All-Or-Nothing package transformation algorithm -described in: - -Ronald L. Rivest. "All-Or-Nothing Encryption and The Package Transform" -http://theory.lcs.mit.edu/~rivest/fusion.pdf - -""" - -__revision__ = "$Id$" - -import operator -import sys -from Crypto.Util.number import bytes_to_long, long_to_bytes -from Crypto.Util.py3compat import * -from functools import reduce - -def isInt(x): - test = 0 - try: - test += x - except TypeError: - return 0 - return 1 - -class AllOrNothing: - """Class implementing the All-or-Nothing package transform. - - Methods for subclassing: - - _inventkey(key_size): - Returns a randomly generated key. Subclasses can use this to - implement better random key generating algorithms. The default - algorithm is probably not very cryptographically secure. - - """ - - def __init__(self, ciphermodule, mode=None, IV=None): - """AllOrNothing(ciphermodule, mode=None, IV=None) - - ciphermodule is a module implementing the cipher algorithm to - use. It must provide the PEP272 interface. - - Note that the encryption key is randomly generated - automatically when needed. Optional arguments mode and IV are - passed directly through to the ciphermodule.new() method; they - are the feedback mode and initialization vector to use. All - three arguments must be the same for the object used to create - the digest, and to undigest'ify the message blocks. - """ - - self.__ciphermodule = ciphermodule - self.__mode = mode - self.__IV = IV - self.__key_size = ciphermodule.key_size - if not isInt(self.__key_size) or self.__key_size==0: - self.__key_size = 16 - - __K0digit = bchr(0x69) - - def digest(self, text): - """digest(text:string) : [string] - - Perform the All-or-Nothing package transform on the given - string. Output is a list of message blocks describing the - transformed text, where each block is a string of bit length equal - to the ciphermodule's block_size. - """ - - # generate a random session key and K0, the key used to encrypt the - # hash blocks. Rivest calls this a fixed, publically-known encryption - # key, but says nothing about the security implications of this key or - # how to choose it. - key = self._inventkey(self.__key_size) - K0 = self.__K0digit * self.__key_size - - # we need two cipher objects here, one that is used to encrypt the - # message blocks and one that is used to encrypt the hashes. The - # former uses the randomly generated key, while the latter uses the - # well-known key. - mcipher = self.__newcipher(key) - hcipher = self.__newcipher(K0) - - # Pad the text so that its length is a multiple of the cipher's - # block_size. 
Pad with trailing spaces, which will be eliminated in - # the undigest() step. - block_size = self.__ciphermodule.block_size - padbytes = block_size - (len(text) % block_size) - text = text + b(' ') * padbytes - - # Run through the algorithm: - # s: number of message blocks (size of text / block_size) - # input sequence: m1, m2, ... ms - # random key K' (`key' in the code) - # Compute output sequence: m'1, m'2, ... m's' for s' = s + 1 - # Let m'i = mi ^ E(K', i) for i = 1, 2, 3, ..., s - # Let m's' = K' ^ h1 ^ h2 ^ ... hs - # where hi = E(K0, m'i ^ i) for i = 1, 2, ... s - # - # The one complication I add is that the last message block is hard - # coded to the number of padbytes added, so that these can be stripped - # during the undigest() step - s = divmod(len(text), block_size)[0] - blocks = [] - hashes = [] - for i in range(1, s+1): - start = (i-1) * block_size - end = start + block_size - mi = text[start:end] - assert len(mi) == block_size - cipherblock = mcipher.encrypt(long_to_bytes(i, block_size)) - mticki = bytes_to_long(mi) ^ bytes_to_long(cipherblock) - blocks.append(mticki) - # calculate the hash block for this block - hi = hcipher.encrypt(long_to_bytes(mticki ^ i, block_size)) - hashes.append(bytes_to_long(hi)) - - # Add the padbytes length as a message block - i = i + 1 - cipherblock = mcipher.encrypt(long_to_bytes(i, block_size)) - mticki = padbytes ^ bytes_to_long(cipherblock) - blocks.append(mticki) - - # calculate this block's hash - hi = hcipher.encrypt(long_to_bytes(mticki ^ i, block_size)) - hashes.append(bytes_to_long(hi)) - - # Now calculate the last message block of the sequence 1..s'. This - # will contain the random session key XOR'd with all the hash blocks, - # so that for undigest(), once all the hash blocks are calculated, the - # session key can be trivially extracted. Calculating all the hash - # blocks requires that all the message blocks be received, thus the - # All-or-Nothing algorithm succeeds. - mtick_stick = bytes_to_long(key) ^ reduce(operator.xor, hashes) - blocks.append(mtick_stick) - - # we convert the blocks to strings since in Python, byte sequences are - # always represented as strings. This is more consistent with the - # model that encryption and hash algorithms always operate on strings. - return [long_to_bytes(i,self.__ciphermodule.block_size) for i in blocks] - - - def undigest(self, blocks): - """undigest(blocks : [string]) : string - - Perform the reverse package transformation on a list of message - blocks. Note that the ciphermodule used for both transformations - must be the same. blocks is a list of strings of bit length - equal to the ciphermodule's block_size. - """ - - # better have at least 2 blocks, for the padbytes package and the hash - # block accumulator - if len(blocks) < 2: - raise ValueError("List must be at least length 2.") - - # blocks is a list of strings. We need to deal with them as long - # integers - blocks = list(map(bytes_to_long, blocks)) - - # Calculate the well-known key, to which the hash blocks are - # encrypted, and create the hash cipher. - K0 = self.__K0digit * self.__key_size - hcipher = self.__newcipher(K0) - block_size = self.__ciphermodule.block_size - - # Since we have all the blocks (or this method would have been called - # prematurely), we can calculate all the hash blocks. - hashes = [] - for i in range(1, len(blocks)): - mticki = blocks[i-1] ^ i - hi = hcipher.encrypt(long_to_bytes(mticki, block_size)) - hashes.append(bytes_to_long(hi)) - - # now we can calculate K' (key). 
remember the last block contains - # m's' which we don't include here - key = blocks[-1] ^ reduce(operator.xor, hashes) - - # and now we can create the cipher object - mcipher = self.__newcipher(long_to_bytes(key, self.__key_size)) - - # And we can now decode the original message blocks - parts = [] - for i in range(1, len(blocks)): - cipherblock = mcipher.encrypt(long_to_bytes(i, block_size)) - mi = blocks[i-1] ^ bytes_to_long(cipherblock) - parts.append(mi) - - # The last message block contains the number of pad bytes appended to - # the original text string, such that its length was an even multiple - # of the cipher's block_size. This number should be small enough that - # the conversion from long integer to integer should never overflow - padbytes = int(parts[-1]) - text = b('').join(map(long_to_bytes, parts[:-1])) - return text[:-padbytes] - - def _inventkey(self, key_size): - # Return key_size random bytes - from Crypto import Random - return Random.new().read(key_size) - - def __newcipher(self, key): - if self.__mode is None and self.__IV is None: - return self.__ciphermodule.new(key) - elif self.__IV is None: - return self.__ciphermodule.new(key, self.__mode) - else: - return self.__ciphermodule.new(key, self.__mode, self.__IV) - - - -if __name__ == '__main__': - import sys - import getopt - import base64 - - usagemsg = '''\ -Test module usage: %(program)s [-c cipher] [-l] [-h] - -Where: - --cipher module - -c module - Cipher module to use. Default: %(ciphermodule)s - - --aslong - -l - Print the encoded message blocks as long integers instead of base64 - encoded strings - - --help - -h - Print this help message -''' - - ciphermodule = 'AES' - aslong = 0 - - def usage(code, msg=None): - if msg: - print(msg) - print(usagemsg % {'program': sys.argv[0], - 'ciphermodule': ciphermodule}) - sys.exit(code) - - try: - opts, args = getopt.getopt(sys.argv[1:], - 'c:l', ['cipher=', 'aslong']) - except getopt.error as msg: - usage(1, msg) - - if args: - usage(1, 'Too many arguments') - - for opt, arg in opts: - if opt in ('-h', '--help'): - usage(0) - elif opt in ('-c', '--cipher'): - ciphermodule = arg - elif opt in ('-l', '--aslong'): - aslong = 1 - - # ugly hack to force __import__ to give us the end-path module - module = __import__('Crypto.Cipher.'+ciphermodule, None, None, ['new']) - - x = AllOrNothing(module) - print('Original text:\n==========') - print(__doc__) - print('==========') - msgblocks = x.digest(b(__doc__)) - print('message blocks:') - for i, blk in zip(list(range(len(msgblocks))), msgblocks): - # base64 adds a trailing newline - print(' %3d' % i, end=' ') - if aslong: - print(bytes_to_long(blk)) - else: - print(base64.encodestring(blk)[:-1]) - # - # get a new undigest-only object so there's no leakage - y = AllOrNothing(module) - text = y.undigest(msgblocks) - if text == b(__doc__): - print('They match!') - else: - print('They differ!') diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Protocol/Chaffing.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Protocol/Chaffing.py deleted file mode 100644 index bbfcbda..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Protocol/Chaffing.py +++ /dev/null @@ -1,245 +0,0 @@ -# -# Chaffing.py : chaffing & winnowing support -# -# Part of the Python Cryptography Toolkit -# -# Written by Andrew M. Kuchling, Barry A. Warsaw, and others -# -# =================================================================== -# The contents of this file are dedicated to the public domain. 
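To make the all-or-nothing property described above concrete without any cipher machinery, here is a toy sketch. It is explicitly not Rivest's package transform as implemented in the removed module; it only isolates the XOR-recombination idea that the final message block relies on: every share is needed, and missing even one leaves the secret unrecoverable.

    import os
    from functools import reduce

    def xor_bytes(a, b):
        return bytes(x ^ y for x, y in zip(a, b))

    def package(secret, n):
        # n - 1 random shares plus one final share that XORs back to the secret.
        shares = [os.urandom(len(secret)) for _ in range(n - 1)]
        shares.append(reduce(xor_bytes, shares, secret))
        return shares

    def unpackage(shares):
        # Without every share, the result is indistinguishable from random bytes.
        return reduce(xor_bytes, shares)

    shares = package(b"session key bytes", 4)
    assert unpackage(shares) == b"session key bytes"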
To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== -# -"""This file implements the chaffing algorithm. - -Winnowing and chaffing is a technique for enhancing privacy without requiring -strong encryption. In short, the technique takes a set of authenticated -message blocks (the wheat) and adds a number of chaff blocks which have -randomly chosen data and MAC fields. This means that to an adversary, the -chaff blocks look as valid as the wheat blocks, and so the authentication -would have to be performed on every block. By tailoring the number of chaff -blocks added to the message, the sender can make breaking the message -computationally infeasible. There are many other interesting properties of -the winnow/chaff technique. - -For example, say Alice is sending a message to Bob. She packetizes the -message and performs an all-or-nothing transformation on the packets. Then -she authenticates each packet with a message authentication code (MAC). The -MAC is a hash of the data packet, and there is a secret key which she must -share with Bob (key distribution is an exercise left to the reader). She then -adds a serial number to each packet, and sends the packets to Bob. - -Bob receives the packets, and using the shared secret authentication key, -authenticates the MACs for each packet. Those packets that have bad MACs are -simply discarded. The remainder are sorted by serial number, and passed -through the reverse all-or-nothing transform. The transform means that an -eavesdropper (say Eve) must acquire all the packets before any of the data can -be read. If even one packet is missing, the data is useless. - -There's one twist: by adding chaff packets, Alice and Bob can make Eve's job -much harder, since Eve now has to break the shared secret key, or try every -combination of wheat and chaff packet to read any of the message. The cool -thing is that Bob doesn't need to add any additional code; the chaff packets -are already filtered out because their MACs don't match (in all likelihood -- -since the data and MACs for the chaff packets are randomly chosen it is -possible, but very unlikely that a chaff MAC will match the chaff data). And -Alice need not even be the party adding the chaff! She could be completely -unaware that a third party, say Charles, is adding chaff packets to her -messages as they are transmitted. - -For more information on winnowing and chaffing see this paper: - -Ronald L. Rivest, "Chaffing and Winnowing: Confidentiality without Encryption" -http://theory.lcs.mit.edu/~rivest/chaffing.txt - -""" - -__revision__ = "$Id$" - -from Crypto.Util.number import bytes_to_long - -class Chaff: - """Class implementing the chaff adding algorithm. 
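The receiving ("winnowing") side described above needs no knowledge of how much chaff was added: it simply drops every block whose MAC fails to verify. A compact sketch using only the standard library, assuming HMAC-SHA256 and a hypothetical pre-shared key (the removed demo further down uses HMAC-SHA1 via Crypto.Hash instead):

    import hashlib
    import hmac

    SHARED_KEY = b"shared secret"   # hypothetical key agreed out of band

    def winnow(blocks):
        # blocks: iterable of (serial, data, mac) triples, wheat and chaff mixed.
        wheat = []
        for serial, data, mac in blocks:
            expected = hmac.new(SHARED_KEY, data, hashlib.sha256).digest()
            if hmac.compare_digest(expected, mac):   # chaff fails this check
                wheat.append((serial, data))
        wheat.sort()                                 # restore order by serial number
        return b"".join(data for _, data in wheat)

With the defaults used in the demo below (factor=0.5, blocksper=2), the brute-force work facing an eavesdropper without the key grows as 2 raised to the number of chaffed blocks, per the pow(blocksper, int(factor * number-of-blocks)) formula quoted in the constructor docstring below.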
- - Methods for subclasses: - - _randnum(size): - Returns a randomly generated number with a byte-length equal - to size. Subclasses can use this to implement better random - data and MAC generating algorithms. The default algorithm is - probably not very cryptographically secure. It is most - important that the chaff data does not contain any patterns - that can be used to discern it from wheat data without running - the MAC. - - """ - - def __init__(self, factor=1.0, blocksper=1): - """Chaff(factor:float, blocksper:int) - - factor is the number of message blocks to add chaff to, - expressed as a percentage between 0.0 and 1.0. blocksper is - the number of chaff blocks to include for each block being - chaffed. Thus the defaults add one chaff block to every - message block. By changing the defaults, you can adjust how - computationally difficult it could be for an adversary to - brute-force crack the message. The difficulty is expressed - as: - - pow(blocksper, int(factor * number-of-blocks)) - - For ease of implementation, when factor < 1.0, only the first - int(factor*number-of-blocks) message blocks are chaffed. - """ - - if not (0.0<=factor<=1.0): - raise ValueError("'factor' must be between 0.0 and 1.0") - if blocksper < 0: - raise ValueError("'blocksper' must be zero or more") - - self.__factor = factor - self.__blocksper = blocksper - - - def chaff(self, blocks): - """chaff( [(serial-number:int, data:string, MAC:string)] ) - : [(int, string, string)] - - Add chaff to message blocks. blocks is a list of 3-tuples of the - form (serial-number, data, MAC). - - Chaff is created by choosing a random number of the same - byte-length as data, and another random number of the same - byte-length as MAC. The message block's serial number is - placed on the chaff block and all the packet's chaff blocks - are randomly interspersed with the single wheat block. This - method then returns a list of 3-tuples of the same form. - Chaffed blocks will contain multiple instances of 3-tuples - with the same serial number, but the only way to figure out - which blocks are wheat and which are chaff is to perform the - MAC hash and compare values. - """ - - chaffedblocks = [] - - # count is the number of blocks to add chaff to. blocksper is the - # number of chaff blocks to add per message block that is being - # chaffed. 
- count = len(blocks) * self.__factor - blocksper = list(range(self.__blocksper)) - for i, wheat in zip(list(range(len(blocks))), blocks): - # it shouldn't matter which of the n blocks we add chaff to, so for - # ease of implementation, we'll just add them to the first count - # blocks - if i < count: - serial, data, mac = wheat - datasize = len(data) - macsize = len(mac) - addwheat = 1 - # add chaff to this block - for j in blocksper: - import sys - chaffdata = self._randnum(datasize) - chaffmac = self._randnum(macsize) - chaff = (serial, chaffdata, chaffmac) - # mix up the order, if the 5th bit is on then put the - # wheat on the list - if addwheat and bytes_to_long(self._randnum(16)) & 0x40: - chaffedblocks.append(wheat) - addwheat = 0 - chaffedblocks.append(chaff) - if addwheat: - chaffedblocks.append(wheat) - else: - # just add the wheat - chaffedblocks.append(wheat) - return chaffedblocks - - def _randnum(self, size): - from Crypto import Random - return Random.new().read(size) - - -if __name__ == '__main__': - text = """\ -We hold these truths to be self-evident, that all men are created equal, that -they are endowed by their Creator with certain unalienable Rights, that among -these are Life, Liberty, and the pursuit of Happiness. That to secure these -rights, Governments are instituted among Men, deriving their just powers from -the consent of the governed. That whenever any Form of Government becomes -destructive of these ends, it is the Right of the People to alter or to -abolish it, and to institute new Government, laying its foundation on such -principles and organizing its powers in such form, as to them shall seem most -likely to effect their Safety and Happiness. -""" - print('Original text:\n==========') - print(text) - print('==========') - - # first transform the text into packets - blocks = [] ; size = 40 - for i in range(0, len(text), size): - blocks.append( text[i:i+size] ) - - # now get MACs for all the text blocks. The key is obvious... - print('Calculating MACs...') - from Crypto.Hash import HMAC, SHA - key = 'Jefferson' - macs = [HMAC.new(key, block, digestmod=SHA).digest() - for block in blocks] - - assert len(blocks) == len(macs) - - # put these into a form acceptable as input to the chaffing procedure - source = [] - m = list(zip(list(range(len(blocks))), blocks, macs)) - print(m) - for i, data, mac in m: - source.append((i, data, mac)) - - # now chaff these - print('Adding chaff...') - c = Chaff(factor=0.5, blocksper=2) - chaffed = c.chaff(source) - - from base64 import encodestring - - # print the chaffed message blocks. 
meanwhile, separate the wheat from - # the chaff - - wheat = [] - print('chaffed message blocks:') - for i, data, mac in chaffed: - # do the authentication - h = HMAC.new(key, data, digestmod=SHA) - pmac = h.digest() - if pmac == mac: - tag = '-->' - wheat.append(data) - else: - tag = ' ' - # base64 adds a trailing newline - print(tag, '%3d' % i, \ - repr(data), encodestring(mac)[:-1]) - - # now decode the message packets and check it against the original text - print('Undigesting wheat...') - # PY3K: This is meant to be text, do not change to bytes (data) - newtext = "".join(wheat) - if newtext == text: - print('They match!') - else: - print('They differ!') diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Protocol/KDF.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Protocol/KDF.py deleted file mode 100644 index af4e2a6..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Protocol/KDF.py +++ /dev/null @@ -1,123 +0,0 @@ -# -# KDF.py : a collection of Key Derivation Functions -# -# Part of the Python Cryptography Toolkit -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""This file contains a collection of standard key derivation functions. - -A key derivation function derives one or more secondary secret keys from -one primary secret (a master key or a pass phrase). - -This is typically done to insulate the secondary keys from each other, -to avoid that leakage of a secondary key compromises the security of the -master key, or to thwart attacks on pass phrases (e.g. via rainbow tables). - -:undocumented: __revision__ -""" - -__revision__ = "$Id$" - -import math -import struct - -from Crypto.Util.py3compat import * -from Crypto.Hash import SHA as SHA1, HMAC -from Crypto.Util.strxor import strxor - -def PBKDF1(password, salt, dkLen, count=1000, hashAlgo=None): - """Derive one key from a password (or passphrase). - - This function performs key derivation according an old version of - the PKCS#5 standard (v1.5). - - This algorithm is called ``PBKDF1``. Even though it is still described - in the latest version of the PKCS#5 standard (version 2, or RFC2898), - newer applications should use the more secure and versatile `PBKDF2` instead. - - :Parameters: - password : string - The secret password or pass phrase to generate the key from. - salt : byte string - An 8 byte string to use for better protection from dictionary attacks. - This value does not need to be kept secret, but it should be randomly - chosen for each derivation. - dkLen : integer - The length of the desired key. Default is 16 bytes, suitable for instance for `Crypto.Cipher.AES`. 
- count : integer - The number of iterations to carry out. It's recommended to use at least 1000. - hashAlgo : module - The hash algorithm to use, as a module or an object from the `Crypto.Hash` package. - The digest length must be no shorter than ``dkLen``. - The default algorithm is `SHA1`. - - :Return: A byte string of length `dkLen` that can be used as key. - """ - if not hashAlgo: - hashAlgo = SHA1 - password = tobytes(password) - pHash = hashAlgo.new(password+salt) - digest = pHash.digest_size - if dkLen>digest: - raise ValueError("Selected hash algorithm has a too short digest (%d bytes)." % digest) - if len(salt)!=8: - raise ValueError("Salt is not 8 bytes long.") - for i in range(count-1): - pHash = pHash.new(pHash.digest()) - return pHash.digest()[:dkLen] - -def PBKDF2(password, salt, dkLen=16, count=1000, prf=None): - """Derive one or more keys from a password (or passphrase). - - This performs key derivation according to the PKCS#5 standard (v2.0), - by means of the ``PBKDF2`` algorithm. - - :Parameters: - password : string - The secret password or pass phrase to generate the key from. - salt : string - A string to use for better protection from dictionary attacks. - This value does not need to be kept secret, but it should be randomly - chosen for each derivation. It is recommended to be at least 8 bytes long. - dkLen : integer - The cumulative length of the desired keys. Default is 16 bytes, suitable for instance for `Crypto.Cipher.AES`. - count : integer - The number of iterations to carry out. It's recommended to use at least 1000. - prf : callable - A pseudorandom function. It must be a function that returns a pseudorandom string - from two parameters: a secret and a salt. If not specified, HMAC-SHA1 is used. - - :Return: A byte string of length `dkLen` that can be used as key material. - If you wanted multiple keys, just break up this string into segments of the desired length. -""" - password = tobytes(password) - if prf is None: - prf = lambda p,s: HMAC.new(p,s,SHA1).digest() - key = b('') - i = 1 - while len(key)<dkLen: - U = previousU = prf(password,salt+struct.pack(">I", i)) - for j in range(count-1): - previousU = t = prf(password,previousU) - U = strxor(U,t) - key += U - i = i + 1 - return key[:dkLen] - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Protocol/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Protocol/__init__.py deleted file mode 100644 index cacc685..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Protocol/__init__.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE.
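Before moving on to the next deleted file, a minimal sketch of how the PBKDF2 function above is meant to be called, following its own parameter guidance (random salt of at least 8 bytes, count of at least 1000, default HMAC-SHA1 PRF). The passphrase, message and 16-byte dkLen are illustrative values only, nothing taken from this repository:

    from Crypto.Hash import HMAC, SHA
    from Crypto.Protocol.KDF import PBKDF2
    from Crypto.Random import get_random_bytes

    salt = get_random_bytes(8)                                 # fresh, non-secret, per derivation
    key = PBKDF2('my passphrase', salt, dkLen=16, count=1000)  # 16 bytes, e.g. an AES-128 key

    # The derived key can feed the HMAC-SHA1 authentication used by the
    # wheat-and-chaff demo earlier in this diff.
    mac = HMAC.new(key, b'some message', digestmod=SHA).digest()
    print(len(key), len(mac))                                  # 16 20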
-# =================================================================== - -"""Cryptographic protocols - -Implements various cryptographic protocols. (Don't expect to find -network protocols here.) - -Crypto.Protocol.AllOrNothing - Transforms a message into a set of message blocks, such that the blocks - can be recombined to get the message back. - -Crypto.Protocol.Chaffing - Takes a set of authenticated message blocks (the wheat) and adds a number - of randomly generated blocks (the chaff). - -Crypto.Protocol.KDF - A collection of standard key derivation functions. - -:undocumented: __revision__ -""" - -__all__ = ['AllOrNothing', 'Chaffing', 'KDF'] -__revision__ = "$Id$" diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/DSA.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/DSA.py deleted file mode 100644 index 648f4b2..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/DSA.py +++ /dev/null @@ -1,379 +0,0 @@ -# -*- coding: utf-8 -*- -# -# PublicKey/DSA.py : DSA signature primitive -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""DSA public-key signature algorithm. - -DSA_ is a widespread public-key signature algorithm. Its security is -based on the discrete logarithm problem (DLP_). Given a cyclic -group, a generator *g*, and an element *h*, it is hard -to find an integer *x* such that *g^x = h*. The problem is believed -to be difficult, and it has been proved such (and therefore secure) for -more than 30 years. - -The group is actually a sub-group over the integers modulo *p*, with *p* prime. -The sub-group order is *q*, which is prime too; it always holds that *(p-1)* is a multiple of *q*. -The cryptographic strength is linked to the magnitude of *p* and *q*. -The signer holds a value *x* (*0>> from Crypto.Random import random - >>> from Crypto.PublicKey import DSA - >>> from Crypto.Hash import SHA - >>> - >>> message = "Hello" - >>> key = DSA.generate(1024) - >>> h = SHA.new(message).digest() - >>> k = random.StrongRandom().randint(1,key.q-1) - >>> sig = key.sign(h,k) - >>> ... - >>> if key.verify(h,sig): - >>> print "OK" - >>> else: - >>> print "Incorrect signature" - -.. _DSA: http://en.wikipedia.org/wiki/Digital_Signature_Algorithm -.. _DLP: http://www.cosic.esat.kuleuven.be/publications/talk-78.pdf -.. 
_ECRYPT: http://www.ecrypt.eu.org/documents/D.SPA.17.pdf -""" - -__revision__ = "$Id$" - -__all__ = ['generate', 'construct', 'error', 'DSAImplementation', '_DSAobj'] - -import sys -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * - -from Crypto.PublicKey import _DSA, _slowmath, pubkey -from Crypto import Random - -try: - from Crypto.PublicKey import _fastmath -except ImportError: - _fastmath = None - -class _DSAobj(pubkey.pubkey): - """Class defining an actual DSA key. - - :undocumented: __getstate__, __setstate__, __repr__, __getattr__ - """ - #: Dictionary of DSA parameters. - #: - #: A public key will only have the following entries: - #: - #: - **y**, the public key. - #: - **g**, the generator. - #: - **p**, the modulus. - #: - **q**, the order of the sub-group. - #: - #: A private key will also have: - #: - #: - **x**, the private key. - keydata = ['y', 'g', 'p', 'q', 'x'] - - def __init__(self, implementation, key): - self.implementation = implementation - self.key = key - - def __getattr__(self, attrname): - if attrname in self.keydata: - # For backward compatibility, allow the user to get (not set) the - # DSA key parameters directly from this object. - return getattr(self.key, attrname) - else: - raise AttributeError("%s object has no %r attribute" % (self.__class__.__name__, attrname,)) - - def sign(self, M, K): - """Sign a piece of data with DSA. - - :Parameter M: The piece of data to sign with DSA. It may - not be longer in bit size than the sub-group order (*q*). - :Type M: byte string or long - - :Parameter K: A secret number, chosen randomly in the closed - range *[1,q-1]*. - :Type K: long (recommended) or byte string (not recommended) - - :attention: selection of *K* is crucial for security. Generating a - random number larger than *q* and taking the modulus by *q* is - **not** secure, since smaller values will occur more frequently. - Generating a random number systematically smaller than *q-1* - (e.g. *floor((q-1)/8)* random bytes) is also **not** secure. In general, - it shall not be possible for an attacker to know the value of `any - bit of K`__. - - :attention: The number *K* shall not be reused for any other - operation and shall be discarded immediately. - - :attention: M must be a digest cryptographic hash, otherwise - an attacker may mount an existential forgery attack. - - :Return: A tuple with 2 longs. - - .. __: http://www.di.ens.fr/~pnguyen/pub_NgSh00.htm - """ - return pubkey.pubkey.sign(self, M, K) - - def verify(self, M, signature): - """Verify the validity of a DSA signature. - - :Parameter M: The expected message. - :Type M: byte string or long - - :Parameter signature: The DSA signature to verify. - :Type signature: A tuple with 2 longs as return by `sign` - - :Return: True if the signature is correct, False otherwise. 
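The sign()/verify() contract documented above can be exercised end to end; this is a sketch along the lines of the module docstring's own example, with the message and the 1024-bit size chosen arbitrarily. The per-signature secret K must be drawn uniformly from [1, q-1] and never reused:

    from Crypto.Hash import SHA
    from Crypto.PublicKey import DSA
    from Crypto.Random import random

    key = DSA.generate(1024)                         # p is 1024 bits, q is 160 bits in this build

    h = SHA.new(b'Hello').digest()                   # sign a digest, never the raw message
    k = random.StrongRandom().randint(1, key.q - 1)  # fresh secret for this signature only
    r_s = key.sign(h, k)                             # a tuple of two longs (r, s)

    assert key.verify(h, r_s)
    assert (key.p - 1) % key.q == 0                  # the domain-parameter relation noted above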
- """ - return pubkey.pubkey.verify(self, M, signature) - - def _encrypt(self, c, K): - raise TypeError("DSA cannot encrypt") - - def _decrypt(self, c): - raise TypeError("DSA cannot decrypt") - - def _blind(self, m, r): - raise TypeError("DSA cannot blind") - - def _unblind(self, m, r): - raise TypeError("DSA cannot unblind") - - def _sign(self, m, k): - return self.key._sign(m, k) - - def _verify(self, m, sig): - (r, s) = sig - return self.key._verify(m, r, s) - - def has_private(self): - return self.key.has_private() - - def size(self): - return self.key.size() - - def can_blind(self): - return False - - def can_encrypt(self): - return False - - def can_sign(self): - return True - - def publickey(self): - return self.implementation.construct((self.key.y, self.key.g, self.key.p, self.key.q)) - - def __getstate__(self): - d = {} - for k in self.keydata: - try: - d[k] = getattr(self.key, k) - except AttributeError: - pass - return d - - def __setstate__(self, d): - if not hasattr(self, 'implementation'): - self.implementation = DSAImplementation() - t = [] - for k in self.keydata: - if k not in d: - break - t.append(d[k]) - self.key = self.implementation._math.dsa_construct(*tuple(t)) - - def __repr__(self): - attrs = [] - for k in self.keydata: - if k == 'p': - attrs.append("p(%d)" % (self.size()+1,)) - elif hasattr(self.key, k): - attrs.append(k) - if self.has_private(): - attrs.append("private") - # PY3K: This is meant to be text, do not change to bytes (data) - return "<%s @0x%x %s>" % (self.__class__.__name__, id(self), ",".join(attrs)) - -class DSAImplementation(object): - """ - A DSA key factory. - - This class is only internally used to implement the methods of the - `Crypto.PublicKey.DSA` module. - """ - - def __init__(self, **kwargs): - """Create a new DSA key factory. - - :Keywords: - use_fast_math : bool - Specify which mathematic library to use: - - - *None* (default). Use fastest math available. - - *True* . Use fast math. - - *False* . Use slow math. - default_randfunc : callable - Specify how to collect random data: - - - *None* (default). Use Random.new().read(). - - not *None* . Use the specified function directly. - :Raise RuntimeError: - When **use_fast_math** =True but fast math is not available. - """ - use_fast_math = kwargs.get('use_fast_math', None) - if use_fast_math is None: # Automatic - if _fastmath is not None: - self._math = _fastmath - else: - self._math = _slowmath - - elif use_fast_math: # Explicitly select fast math - if _fastmath is not None: - self._math = _fastmath - else: - raise RuntimeError("fast math module not available") - - else: # Explicitly select slow math - self._math = _slowmath - - self.error = self._math.error - - # 'default_randfunc' parameter: - # None (default) - use Random.new().read - # not None - use the specified function - self._default_randfunc = kwargs.get('default_randfunc', None) - self._current_randfunc = None - - def _get_randfunc(self, randfunc): - if randfunc is not None: - return randfunc - elif self._current_randfunc is None: - self._current_randfunc = Random.new().read - return self._current_randfunc - - def generate(self, bits, randfunc=None, progress_func=None): - """Randomly generate a fresh, new DSA key. - - :Parameters: - bits : int - Key length, or size (in bits) of the DSA modulus - *p*. - It must be a multiple of 64, in the closed - interval [512,1024]. - randfunc : callable - Random number generation function; it should accept - a single integer N and return a string of random data - N bytes long. 
- If not specified, a new one will be instantiated - from ``Crypto.Random``. - progress_func : callable - Optional function that will be called with a short string - containing the key parameter currently being generated; - it's useful for interactive applications where a user is - waiting for a key to be generated. - - :attention: You should always use a cryptographically secure random number generator, - such as the one defined in the ``Crypto.Random`` module; **don't** just use the - current time and the ``random`` module. - - :Return: A DSA key object (`_DSAobj`). - - :Raise ValueError: - When **bits** is too little, too big, or not a multiple of 64. - """ - - # Check against FIPS 186-2, which says that the size of the prime p - # must be a multiple of 64 bits between 512 and 1024 - for i in (0, 1, 2, 3, 4, 5, 6, 7, 8): - if bits == 512 + 64*i: - return self._generate(bits, randfunc, progress_func) - - # The March 2006 draft of FIPS 186-3 also allows 2048 and 3072-bit - # primes, but only with longer q values. Since the current DSA - # implementation only supports a 160-bit q, we don't support larger - # values. - raise ValueError("Number of bits in p must be a multiple of 64 between 512 and 1024, not %d bits" % (bits,)) - - def _generate(self, bits, randfunc=None, progress_func=None): - rf = self._get_randfunc(randfunc) - obj = _DSA.generate_py(bits, rf, progress_func) # TODO: Don't use legacy _DSA module - key = self._math.dsa_construct(obj.y, obj.g, obj.p, obj.q, obj.x) - return _DSAobj(self, key) - - def construct(self, tup): - """Construct a DSA key from a tuple of valid DSA components. - - The modulus *p* must be a prime. - - The following equations must apply: - - - p-1 = 0 mod q - - g^x = y mod p - - 0 < x < q - - 1 < g < p - - :Parameters: - tup : tuple - A tuple of long integers, with 4 or 5 items - in the following order: - - 1. Public key (*y*). - 2. Sub-group generator (*g*). - 3. Modulus, finite field order (*p*). - 4. Sub-group order (*q*). - 5. Private key (*x*). Optional. - - :Return: A DSA key object (`_DSAobj`). - """ - key = self._math.dsa_construct(*tup) - return _DSAobj(self, key) - -_impl = DSAImplementation() -generate = _impl.generate -construct = _impl.construct -error = _impl.error - -# vim:set ts=4 sw=4 sts=4 expandtab: - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/ElGamal.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/ElGamal.py deleted file mode 100644 index 99af71c..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/ElGamal.py +++ /dev/null @@ -1,373 +0,0 @@ -# -# ElGamal.py : ElGamal encryption/decryption and signatures -# -# Part of the Python Cryptography Toolkit -# -# Originally written by: A.M. Kuchling -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""ElGamal public-key algorithm (randomized encryption and signature). - -Signature algorithm -------------------- -The security of the ElGamal signature scheme is based (like DSA) on the discrete -logarithm problem (DLP_). Given a cyclic group, a generator *g*, -and an element *h*, it is hard to find an integer *x* such that *g^x = h*. - -The group is the largest multiplicative sub-group of the integers modulo *p*, -with *p* prime. -The signer holds a value *x* (*0>> from Crypto import Random - >>> from Crypto.Random import random - >>> from Crypto.PublicKey import ElGamal - >>> from Crypto.Util.number import GCD - >>> from Crypto.Hash import SHA - >>> - >>> message = "Hello" - >>> key = ElGamal.generate(1024, Random.new().read) - >>> h = SHA.new(message).digest() - >>> while 1: - >>> k = random.StrongRandom().randint(1,key.p-1) - >>> if GCD(k,key.p-1)==1: break - >>> sig = key.sign(h,k) - >>> ... - >>> if key.verify(h,sig): - >>> print "OK" - >>> else: - >>> print "Incorrect signature" - -.. _DLP: http://www.cosic.esat.kuleuven.be/publications/talk-78.pdf -.. _CDH: http://en.wikipedia.org/wiki/Computational_Diffie%E2%80%93Hellman_assumption -.. _ECRYPT: http://www.ecrypt.eu.org/documents/D.SPA.17.pdf -""" - -__revision__ = "$Id$" - -__all__ = ['generate', 'construct', 'error', 'ElGamalobj'] - -from Crypto.PublicKey.pubkey import * -from Crypto.Util import number - -class error (Exception): - pass - -# Generate an ElGamal key with N bits -def generate(bits, randfunc, progress_func=None): - """Randomly generate a fresh, new ElGamal key. - - The key will be safe for use for both encryption and signature - (although it should be used for **only one** purpose). - - :Parameters: - bits : int - Key length, or size (in bits) of the modulus *p*. - Recommended value is 2048. - randfunc : callable - Random number generation function; it should accept - a single integer N and return a string of random data - N bytes long. - progress_func : callable - Optional function that will be called with a short string - containing the key parameter currently being generated; - it's useful for interactive applications where a user is - waiting for a key to be generated. - - :attention: You should always use a cryptographically secure random number generator, - such as the one defined in the ``Crypto.Random`` module; **don't** just use the - current time and the ``random`` module. - - :Return: An ElGamal key object (`ElGamalobj`). 
- """ - obj=ElGamalobj() - # Generate a safe prime p - # See Algorithm 4.86 in Handbook of Applied Cryptography - if progress_func: - progress_func('p\n') - while 1: - q = bignum(getPrime(bits-1, randfunc)) - obj.p = 2*q+1 - if number.isPrime(obj.p, randfunc=randfunc): - break - # Generate generator g - # See Algorithm 4.80 in Handbook of Applied Cryptography - # Note that the order of the group is n=p-1=2q, where q is prime - if progress_func: - progress_func('g\n') - while 1: - # We must avoid g=2 because of Bleichenbacher's attack described - # in "Generating ElGamal signatures without knowning the secret key", - # 1996 - # - obj.g = number.getRandomRange(3, obj.p, randfunc) - safe = 1 - if pow(obj.g, 2, obj.p)==1: - safe=0 - if safe and pow(obj.g, q, obj.p)==1: - safe=0 - # Discard g if it divides p-1 because of the attack described - # in Note 11.67 (iii) in HAC - if safe and divmod(obj.p-1, obj.g)[1]==0: - safe=0 - # g^{-1} must not divide p-1 because of Khadir's attack - # described in "Conditions of the generator for forging ElGamal - # signature", 2011 - ginv = number.inverse(obj.g, obj.p) - if safe and divmod(obj.p-1, ginv)[1]==0: - safe=0 - if safe: - break - # Generate private key x - if progress_func: - progress_func('x\n') - obj.x=number.getRandomRange(2, obj.p-1, randfunc) - # Generate public key y - if progress_func: - progress_func('y\n') - obj.y = pow(obj.g, obj.x, obj.p) - return obj - -def construct(tup): - """Construct an ElGamal key from a tuple of valid ElGamal components. - - The modulus *p* must be a prime. - - The following conditions must apply: - - - 1 < g < p-1 - - g^{p-1} = 1 mod p - - 1 < x < p-1 - - g^x = y mod p - - :Parameters: - tup : tuple - A tuple of long integers, with 3 or 4 items - in the following order: - - 1. Modulus (*p*). - 2. Generator (*g*). - 3. Public key (*y*). - 4. Private key (*x*). Optional. - - :Return: An ElGamal key object (`ElGamalobj`). - """ - - obj=ElGamalobj() - if len(tup) not in [3,4]: - raise ValueError('argument for construct() wrong length') - for i in range(len(tup)): - field = obj.keydata[i] - setattr(obj, field, tup[i]) - return obj - -class ElGamalobj(pubkey): - """Class defining an ElGamal key. - - :undocumented: __getstate__, __setstate__, __repr__, __getattr__ - """ - - #: Dictionary of ElGamal parameters. - #: - #: A public key will only have the following entries: - #: - #: - **y**, the public key. - #: - **g**, the generator. - #: - **p**, the modulus. - #: - #: A private key will also have: - #: - #: - **x**, the private key. - keydata=['p', 'g', 'y', 'x'] - - def encrypt(self, plaintext, K): - """Encrypt a piece of data with ElGamal. - - :Parameter plaintext: The piece of data to encrypt with ElGamal. - It must be numerically smaller than the module (*p*). - :Type plaintext: byte string or long - - :Parameter K: A secret number, chosen randomly in the closed - range *[1,p-2]*. - :Type K: long (recommended) or byte string (not recommended) - - :Return: A tuple with two items. Each item is of the same type as the - plaintext (string or long). - - :attention: selection of *K* is crucial for security. Generating a - random number larger than *p-1* and taking the modulus by *p-1* is - **not** secure, since smaller values will occur more frequently. - Generating a random number systematically smaller than *p-1* - (e.g. *floor((p-1)/8)* random bytes) is also **not** secure. - In general, it shall not be possible for an attacker to know - the value of any bit of K. 
- - :attention: The number *K* shall not be reused for any other - operation and shall be discarded immediately. - """ - return pubkey.encrypt(self, plaintext, K) - - def decrypt(self, ciphertext): - """Decrypt a piece of data with ElGamal. - - :Parameter ciphertext: The piece of data to decrypt with ElGamal. - :Type ciphertext: byte string, long or a 2-item tuple as returned - by `encrypt` - - :Return: A byte string if ciphertext was a byte string or a tuple - of byte strings. A long otherwise. - """ - return pubkey.decrypt(self, ciphertext) - - def sign(self, M, K): - """Sign a piece of data with ElGamal. - - :Parameter M: The piece of data to sign with ElGamal. It may - not be longer in bit size than *p-1*. - :Type M: byte string or long - - :Parameter K: A secret number, chosen randomly in the closed - range *[1,p-2]* and such that *gcd(k,p-1)=1*. - :Type K: long (recommended) or byte string (not recommended) - - :attention: selection of *K* is crucial for security. Generating a - random number larger than *p-1* and taking the modulus by *p-1* is - **not** secure, since smaller values will occur more frequently. - Generating a random number systematically smaller than *p-1* - (e.g. *floor((p-1)/8)* random bytes) is also **not** secure. - In general, it shall not be possible for an attacker to know - the value of any bit of K. - - :attention: The number *K* shall not be reused for any other - operation and shall be discarded immediately. - - :attention: M must be be a cryptographic hash, otherwise an - attacker may mount an existential forgery attack. - - :Return: A tuple with 2 longs. - """ - return pubkey.sign(self, M, K) - - def verify(self, M, signature): - """Verify the validity of an ElGamal signature. - - :Parameter M: The expected message. - :Type M: byte string or long - - :Parameter signature: The ElGamal signature to verify. - :Type signature: A tuple with 2 longs as return by `sign` - - :Return: True if the signature is correct, False otherwise. 
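The encrypt()/decrypt() pair documented above has no doctest in the module, so here is a hedged sketch. The 256-bit size only keeps safe-prime generation quick for illustration and is far below the 2048 bits the generate() docstring recommends; the message is made up:

    from Crypto import Random
    from Crypto.PublicKey import ElGamal
    from Crypto.Random import random

    key = ElGamal.generate(256, Random.new().read)    # toy size; may still take a little while

    message = b'hello'
    k = random.StrongRandom().randint(1, key.p - 2)   # fresh ephemeral secret per encryption
    a, b = key.encrypt(message, k)                    # two byte strings, as documented

    assert key.decrypt((a, b)) == message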
- """ - return pubkey.verify(self, M, signature) - - def _encrypt(self, M, K): - a=pow(self.g, K, self.p) - b=( M*pow(self.y, K, self.p) ) % self.p - return ( a,b ) - - def _decrypt(self, M): - if (not hasattr(self, 'x')): - raise TypeError('Private key not available in this object') - ax=pow(M[0], self.x, self.p) - plaintext=(M[1] * inverse(ax, self.p ) ) % self.p - return plaintext - - def _sign(self, M, K): - if (not hasattr(self, 'x')): - raise TypeError('Private key not available in this object') - p1=self.p-1 - if (GCD(K, p1)!=1): - raise ValueError('Bad K value: GCD(K,p-1)!=1') - a=pow(self.g, K, self.p) - t=(M-self.x*a) % p1 - while t<0: t=t+p1 - b=(t*inverse(K, p1)) % p1 - return (a, b) - - def _verify(self, M, sig): - if sig[0]<1 or sig[0]>self.p-1: - return 0 - v1=pow(self.y, sig[0], self.p) - v1=(v1*pow(sig[0], sig[1], self.p)) % self.p - v2=pow(self.g, M, self.p) - if v1==v2: - return 1 - return 0 - - def size(self): - return number.size(self.p) - 1 - - def has_private(self): - if hasattr(self, 'x'): - return 1 - else: - return 0 - - def publickey(self): - return construct((self.p, self.g, self.y)) - - -object=ElGamalobj diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/RSA.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/RSA.py deleted file mode 100644 index debe39e..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/RSA.py +++ /dev/null @@ -1,719 +0,0 @@ -# -*- coding: utf-8 -*- -# -# PublicKey/RSA.py : RSA public key primitive -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""RSA public-key cryptography algorithm (signature and encryption). - -RSA_ is the most widespread and used public key algorithm. Its security is -based on the difficulty of factoring large integers. The algorithm has -withstood attacks for 30 years, and it is therefore considered reasonably -secure for new designs. - -The algorithm can be used for both confidentiality (encryption) and -authentication (digital signature). It is worth noting that signing and -decryption are significantly slower than verification and encryption. -The cryptograhic strength is primarily linked to the length of the modulus *n*. -In 2012, a sufficient length is deemed to be 2048 bits. For more information, -see the most recent ECRYPT_ report. - -Both RSA ciphertext and RSA signature are as big as the modulus *n* (256 -bytes if *n* is 2048 bit long). - -This module provides facilities for generating fresh, new RSA keys, constructing -them from known components, exporting them, and importing them. 
- - >>> from Crypto.PublicKey import RSA - >>> - >>> key = RSA.generate(2048) - >>> f = open('mykey.pem','w') - >>> f.write(RSA.exportKey('PEM')) - >>> f.close() - ... - >>> f = open('mykey.pem','r') - >>> key = RSA.importKey(f.read()) - -Even though you may choose to directly use the methods of an RSA key object -to perform the primitive cryptographic operations (e.g. `_RSAobj.encrypt`), -it is recommended to use one of the standardized schemes instead (like -`Crypto.Cipher.PKCS1_v1_5` or `Crypto.Signature.PKCS1_v1_5`). - -.. _RSA: http://en.wikipedia.org/wiki/RSA_%28algorithm%29 -.. _ECRYPT: http://www.ecrypt.eu.org/documents/D.SPA.17.pdf - -:sort: generate,construct,importKey,error -""" - -__revision__ = "$Id$" - -__all__ = ['generate', 'construct', 'error', 'importKey', 'RSAImplementation', '_RSAobj'] - -import sys -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * -from Crypto.Util.py3compat import * -#from Crypto.Util.python_compat import * -from Crypto.Util.number import getRandomRange, bytes_to_long, long_to_bytes - -from Crypto.PublicKey import _RSA, _slowmath, pubkey -from Crypto import Random - -from Crypto.Util.asn1 import DerObject, DerSequence, DerNull -import binascii -import struct - -from Crypto.Util.number import inverse - -from Crypto.Util.number import inverse - -try: - from Crypto.PublicKey import _fastmath -except ImportError: - _fastmath = None - -class _RSAobj(pubkey.pubkey): - """Class defining an actual RSA key. - - :undocumented: __getstate__, __setstate__, __repr__, __getattr__ - """ - #: Dictionary of RSA parameters. - #: - #: A public key will only have the following entries: - #: - #: - **n**, the modulus. - #: - **e**, the public exponent. - #: - #: A private key will also have: - #: - #: - **d**, the private exponent. - #: - **p**, the first factor of n. - #: - **q**, the second factor of n. - #: - **u**, the CRT coefficient (1/p) mod q. - keydata = ['n', 'e', 'd', 'p', 'q', 'u'] - - def __init__(self, implementation, key, randfunc=None): - self.implementation = implementation - self.key = key - if randfunc is None: - randfunc = Random.new().read - self._randfunc = randfunc - - def __getattr__(self, attrname): - if attrname in self.keydata: - # For backward compatibility, allow the user to get (not set) the - # RSA key parameters directly from this object. - return getattr(self.key, attrname) - else: - raise AttributeError("%s object has no %r attribute" % (self.__class__.__name__, attrname,)) - - def encrypt(self, plaintext, K): - """Encrypt a piece of data with RSA. - - :Parameter plaintext: The piece of data to encrypt with RSA. It may not - be numerically larger than the RSA module (**n**). - :Type plaintext: byte string or long - - :Parameter K: A random parameter (*for compatibility only. This - value will be ignored*) - :Type K: byte string or long - - :attention: this function performs the plain, primitive RSA encryption - (*textbook*). In real applications, you always need to use proper - cryptographic padding, and you should not directly encrypt data with - this method. Failure to do so may lead to security vulnerabilities. - It is recommended to use modules - `Crypto.Cipher.PKCS1_OAEP` or `Crypto.Cipher.PKCS1_v1_5` instead. - - :Return: A tuple with two items. The first item is the ciphertext - of the same type as the plaintext (string or long). The second item - is always None. 
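The encrypt() docstring above points to `Crypto.Cipher.PKCS1_OAEP` as the padded scheme to use instead of the textbook primitive; assuming that module from the same PyCrypto release is importable, a minimal round trip looks like this (key size and message are arbitrary):

    from Crypto.Cipher import PKCS1_OAEP
    from Crypto.PublicKey import RSA

    key = RSA.generate(2048)

    ciphertext = PKCS1_OAEP.new(key.publickey()).encrypt(b'attack at dawn')
    plaintext = PKCS1_OAEP.new(key).decrypt(ciphertext)   # needs the private half

    assert plaintext == b'attack at dawn'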
- """ - return pubkey.pubkey.encrypt(self, plaintext, K) - - def decrypt(self, ciphertext): - """Decrypt a piece of data with RSA. - - Decryption always takes place with blinding. - - :attention: this function performs the plain, primitive RSA decryption - (*textbook*). In real applications, you always need to use proper - cryptographic padding, and you should not directly decrypt data with - this method. Failure to do so may lead to security vulnerabilities. - It is recommended to use modules - `Crypto.Cipher.PKCS1_OAEP` or `Crypto.Cipher.PKCS1_v1_5` instead. - - :Parameter ciphertext: The piece of data to decrypt with RSA. It may - not be numerically larger than the RSA module (**n**). If a tuple, - the first item is the actual ciphertext; the second item is ignored. - - :Type ciphertext: byte string, long or a 2-item tuple as returned by - `encrypt` - - :Return: A byte string if ciphertext was a byte string or a tuple - of byte strings. A long otherwise. - """ - return pubkey.pubkey.decrypt(self, ciphertext) - - def sign(self, M, K): - """Sign a piece of data with RSA. - - Signing always takes place with blinding. - - :attention: this function performs the plain, primitive RSA decryption - (*textbook*). In real applications, you always need to use proper - cryptographic padding, and you should not directly sign data with - this method. Failure to do so may lead to security vulnerabilities. - It is recommended to use modules - `Crypto.Signature.PKCS1_PSS` or `Crypto.Signature.PKCS1_v1_5` instead. - - :Parameter M: The piece of data to sign with RSA. It may - not be numerically larger than the RSA module (**n**). - :Type M: byte string or long - - :Parameter K: A random parameter (*for compatibility only. This - value will be ignored*) - :Type K: byte string or long - - :Return: A 2-item tuple. The first item is the actual signature (a - long). The second item is always None. - """ - return pubkey.pubkey.sign(self, M, K) - - def verify(self, M, signature): - """Verify the validity of an RSA signature. - - :attention: this function performs the plain, primitive RSA encryption - (*textbook*). In real applications, you always need to use proper - cryptographic padding, and you should not directly verify data with - this method. Failure to do so may lead to security vulnerabilities. - It is recommended to use modules - `Crypto.Signature.PKCS1_PSS` or `Crypto.Signature.PKCS1_v1_5` instead. - - :Parameter M: The expected message. - :Type M: byte string or long - - :Parameter signature: The RSA signature to verify. The first item of - the tuple is the actual signature (a long not larger than the modulus - **n**), whereas the second item is always ignored. - :Type signature: A 2-item tuple as return by `sign` - - :Return: True if the signature is correct, False otherwise. - """ - return pubkey.pubkey.verify(self, M, signature) - - def _encrypt(self, c, K): - return (self.key._encrypt(c),) - - def _decrypt(self, c): - #(ciphertext,) = c - (ciphertext,) = c[:1] # HACK - We should use the previous line - # instead, but this is more compatible and we're - # going to replace the Crypto.PublicKey API soon - # anyway. 
- - # Blinded RSA decryption (to prevent timing attacks): - # Step 1: Generate random secret blinding factor r, such that 0 < r < n-1 - r = getRandomRange(1, self.key.n-1, randfunc=self._randfunc) - # Step 2: Compute c' = c * r**e mod n - cp = self.key._blind(ciphertext, r) - # Step 3: Compute m' = c'**d mod n (ordinary RSA decryption) - mp = self.key._decrypt(cp) - # Step 4: Compute m = m**(r-1) mod n - return self.key._unblind(mp, r) - - def _blind(self, m, r): - return self.key._blind(m, r) - - def _unblind(self, m, r): - return self.key._unblind(m, r) - - def _sign(self, m, K=None): - return (self.key._sign(m),) - - def _verify(self, m, sig): - #(s,) = sig - (s,) = sig[:1] # HACK - We should use the previous line instead, but - # this is more compatible and we're going to replace - # the Crypto.PublicKey API soon anyway. - return self.key._verify(m, s) - - def has_private(self): - return self.key.has_private() - - def size(self): - return self.key.size() - - def can_blind(self): - return True - - def can_encrypt(self): - return True - - def can_sign(self): - return True - - def publickey(self): - return self.implementation.construct((self.key.n, self.key.e)) - - def __getstate__(self): - d = {} - for k in self.keydata: - try: - d[k] = getattr(self.key, k) - except AttributeError: - pass - return d - - def __setstate__(self, d): - if not hasattr(self, 'implementation'): - self.implementation = RSAImplementation() - t = [] - for k in self.keydata: - if k not in d: - break - t.append(d[k]) - self.key = self.implementation._math.rsa_construct(*tuple(t)) - - def __repr__(self): - attrs = [] - for k in self.keydata: - if k == 'n': - attrs.append("n(%d)" % (self.size()+1,)) - elif hasattr(self.key, k): - attrs.append(k) - if self.has_private(): - attrs.append("private") - # PY3K: This is meant to be text, do not change to bytes (data) - return "<%s @0x%x %s>" % (self.__class__.__name__, id(self), ",".join(attrs)) - - def exportKey(self, format='PEM', passphrase=None, pkcs=1): - """Export this RSA key. - - :Parameter format: The format to use for wrapping the key. - - - *'DER'*. Binary encoding, always unencrypted. - - *'PEM'*. Textual encoding, done according to `RFC1421`_/`RFC1423`_. - Unencrypted (default) or encrypted. - - *'OpenSSH'*. Textual encoding, done according to OpenSSH specification. - Only suitable for public keys (not private keys). - :Type format: string - - :Parameter passphrase: In case of PEM, the pass phrase to derive the encryption key from. - :Type passphrase: string - - :Parameter pkcs: The PKCS standard to follow for assembling the key. - You have two choices: - - - with **1**, the public key is embedded into an X.509 `SubjectPublicKeyInfo` DER SEQUENCE. - The private key is embedded into a `PKCS#1`_ `RSAPrivateKey` DER SEQUENCE. - This mode is the default. - - with **8**, the private key is embedded into a `PKCS#8`_ `PrivateKeyInfo` DER SEQUENCE. - This mode is not available for public keys. - - PKCS standards are not relevant for the *OpenSSH* format. - :Type pkcs: integer - - :Return: A byte string with the encoded public or private half. - :Raise ValueError: - When the format is unknown. - - .. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt - .. _RFC1423: http://www.ietf.org/rfc/rfc1423.txt - .. _`PKCS#1`: http://www.ietf.org/rfc/rfc3447.txt - .. 
_`PKCS#8`: http://www.ietf.org/rfc/rfc5208.txt - """ - if passphrase is not None: - passphrase = tobytes(passphrase) - if format=='OpenSSH': - eb = long_to_bytes(self.e) - nb = long_to_bytes(self.n) - if bord(eb[0]) & 0x80: eb=bchr(0x00)+eb - if bord(nb[0]) & 0x80: nb=bchr(0x00)+nb - keyparts = [ 'ssh-rsa', eb, nb ] - keystring = ''.join([ struct.pack(">I",len(kp))+kp for kp in keyparts]) - return 'ssh-rsa '+binascii.b2a_base64(keystring)[:-1] - - # DER format is always used, even in case of PEM, which simply - # encodes it into BASE64. - der = DerSequence() - if self.has_private(): - keyType= { 1: 'RSA PRIVATE', 8: 'PRIVATE' }[pkcs] - der[:] = [ 0, self.n, self.e, self.d, self.p, self.q, - self.d % (self.p-1), self.d % (self.q-1), - inverse(self.q, self.p) ] - if pkcs==8: - derkey = der.encode() - der = DerSequence([0]) - der.append(algorithmIdentifier) - der.append(DerObject('OCTET STRING', derkey).encode()) - else: - keyType = "PUBLIC" - der.append(algorithmIdentifier) - bitmap = DerObject('BIT STRING') - derPK = DerSequence( [ self.n, self.e ] ) - bitmap.payload = bchr(0x00) + derPK.encode() - der.append(bitmap.encode()) - if format=='DER': - return der.encode() - if format=='PEM': - pem = b("-----BEGIN " + keyType + " KEY-----\n") - objenc = None - if passphrase and keyType.endswith('PRIVATE'): - # We only support 3DES for encryption - import Crypto.Hash.MD5 - from Crypto.Cipher import DES3 - from Crypto.Protocol.KDF import PBKDF1 - salt = self._randfunc(8) - key = PBKDF1(passphrase, salt, 16, 1, Crypto.Hash.MD5) - key += PBKDF1(key+passphrase, salt, 8, 1, Crypto.Hash.MD5) - objenc = DES3.new(key, Crypto.Cipher.DES3.MODE_CBC, salt) - pem += b('Proc-Type: 4,ENCRYPTED\n') - pem += b('DEK-Info: DES-EDE3-CBC,') + binascii.b2a_hex(salt).upper() + b('\n\n') - - binaryKey = der.encode() - if objenc: - # Add PKCS#7-like padding - padding = objenc.block_size-len(binaryKey)%objenc.block_size - binaryKey = objenc.encrypt(binaryKey+bchr(padding)*padding) - - # Each BASE64 line can take up to 64 characters (=48 bytes of data) - chunks = [ binascii.b2a_base64(binaryKey[i:i+48]) for i in range(0, len(binaryKey), 48) ] - pem += b('').join(chunks) - pem += b("-----END " + keyType + " KEY-----") - return pem - return ValueError("Unknown key format '%s'. Cannot export the RSA key." % format) - -class RSAImplementation(object): - """ - An RSA key factory. - - This class is only internally used to implement the methods of the `Crypto.PublicKey.RSA` module. - - :sort: __init__,generate,construct,importKey - :undocumented: _g*, _i* - """ - - def __init__(self, **kwargs): - """Create a new RSA key factory. - - :Keywords: - use_fast_math : bool - Specify which mathematic library to use: - - - *None* (default). Use fastest math available. - - *True* . Use fast math. - - *False* . Use slow math. - default_randfunc : callable - Specify how to collect random data: - - - *None* (default). Use Random.new().read(). - - not *None* . Use the specified function directly. - :Raise RuntimeError: - When **use_fast_math** =True but fast math is not available. 
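exportKey() above describes the passphrase-protected PEM path (PBKDF1/MD5 key derivation, DES-EDE3-CBC); assuming this legacy build behaves the same under Python 3, a round trip through importKey() looks roughly like this, with 'hunter2' standing in for a real pass phrase:

    from Crypto.PublicKey import RSA

    key = RSA.generate(2048)

    pem = key.exportKey(format='PEM', passphrase='hunter2')   # 3DES-encrypted PEM
    assert b'Proc-Type: 4,ENCRYPTED' in pem

    restored = RSA.importKey(pem, passphrase='hunter2')
    assert restored.n == key.n and restored.d == key.d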
- """ - use_fast_math = kwargs.get('use_fast_math', None) - if use_fast_math is None: # Automatic - if _fastmath is not None: - self._math = _fastmath - else: - self._math = _slowmath - - elif use_fast_math: # Explicitly select fast math - if _fastmath is not None: - self._math = _fastmath - else: - raise RuntimeError("fast math module not available") - - else: # Explicitly select slow math - self._math = _slowmath - - self.error = self._math.error - - self._default_randfunc = kwargs.get('default_randfunc', None) - self._current_randfunc = None - - def _get_randfunc(self, randfunc): - if randfunc is not None: - return randfunc - elif self._current_randfunc is None: - self._current_randfunc = Random.new().read - return self._current_randfunc - - def generate(self, bits, randfunc=None, progress_func=None, e=65537): - """Randomly generate a fresh, new RSA key. - - :Parameters: - bits : int - Key length, or size (in bits) of the RSA modulus. - It must be a multiple of 256, and no smaller than 1024. - - randfunc : callable - Random number generation function; it should accept - a single integer N and return a string of random data - N bytes long. - If not specified, a new one will be instantiated - from ``Crypto.Random``. - - progress_func : callable - Optional function that will be called with a short string - containing the key parameter currently being generated; - it's useful for interactive applications where a user is - waiting for a key to be generated. - - e : int - Public RSA exponent. It must be an odd positive integer. - It is typically a small number with very few ones in its - binary representation. - The default value 65537 (= ``0b10000000000000001`` ) is a safe - choice: other common values are 5, 7, 17, and 257. - - :attention: You should always use a cryptographically secure random number generator, - such as the one defined in the ``Crypto.Random`` module; **don't** just use the - current time and the ``random`` module. - - :attention: Exponent 3 is also widely used, but it requires very special care when padding - the message. - - :Return: An RSA key object (`_RSAobj`). - - :Raise ValueError: - When **bits** is too little or not a multiple of 256, or when - **e** is not odd or smaller than 2. - """ - if bits < 1024 or (bits & 0xff) != 0: - # pubkey.getStrongPrime doesn't like anything that's not a multiple of 256 and >= 1024 - raise ValueError("RSA modulus length must be a multiple of 256 and >= 1024") - if e%2==0 or e<3: - raise ValueError("RSA public exponent must be a positive, odd integer larger than 2.") - rf = self._get_randfunc(randfunc) - obj = _RSA.generate_py(bits, rf, progress_func, e) # TODO: Don't use legacy _RSA module - key = self._math.rsa_construct(obj.n, obj.e, obj.d, obj.p, obj.q, obj.u) - return _RSAobj(self, key) - - def construct(self, tup): - """Construct an RSA key from a tuple of valid RSA components. - - The modulus **n** must be the product of two primes. - The public exponent **e** must be odd and larger than 1. - - In case of a private key, the following equations must apply: - - - e != 1 - - p*q = n - - e*d = 1 mod (p-1)(q-1) - - p*u = 1 mod q - - :Parameters: - tup : tuple - A tuple of long integers, with at least 2 and no - more than 6 items. The items come in the following order: - - 1. RSA modulus (n). - 2. Public exponent (e). - 3. Private exponent (d). Only required if the key is private. - 4. First factor of n (p). Optional. - 5. Second factor of n (q). Optional. - 6. CRT coefficient, (1/p) mod q (u). Optional. 
- - :Return: An RSA key object (`_RSAobj`). - """ - key = self._math.rsa_construct(*tup) - return _RSAobj(self, key) - - def _importKeyDER(self, externKey): - """Import an RSA key (public or private half), encoded in DER form.""" - - try: - - der = DerSequence() - der.decode(externKey, True) - - # Try PKCS#1 first, for a private key - if len(der)==9 and der.hasOnlyInts() and der[0]==0: - # ASN.1 RSAPrivateKey element - del der[6:] # Remove d mod (p-1), d mod (q-1), and q^{-1} mod p - der.append(inverse(der[4],der[5])) # Add p^{-1} mod q - del der[0] # Remove version - return self.construct(der[:]) - - # Keep on trying PKCS#1, but now for a public key - if len(der)==2: - # The DER object is an RSAPublicKey SEQUENCE with two elements - if der.hasOnlyInts(): - return self.construct(der[:]) - # The DER object is a SubjectPublicKeyInfo SEQUENCE with two elements: - # an 'algorithm' (or 'algorithmIdentifier') SEQUENCE and a 'subjectPublicKey' BIT STRING. - # 'algorithm' takes the value given a few lines above. - # 'subjectPublicKey' encapsulates the actual ASN.1 RSAPublicKey element. - if der[0]==algorithmIdentifier: - bitmap = DerObject() - bitmap.decode(der[1], True) - if bitmap.isType('BIT STRING') and bord(bitmap.payload[0])==0x00: - der.decode(bitmap.payload[1:], True) - if len(der)==2 and der.hasOnlyInts(): - return self.construct(der[:]) - - # Try unencrypted PKCS#8 - if der[0]==0: - # The second element in the SEQUENCE is algorithmIdentifier. - # It must say RSA (see above for description). - if der[1]==algorithmIdentifier: - privateKey = DerObject() - privateKey.decode(der[2], True) - if privateKey.isType('OCTET STRING'): - return self._importKeyDER(privateKey.payload) - - except ValueError as IndexError: - pass - - raise ValueError("RSA key format is not supported") - - def importKey(self, externKey, passphrase=None): - """Import an RSA key (public or private half), encoded in standard form. - - :Parameter externKey: - The RSA key to import, encoded as a string. - - An RSA public key can be in any of the following formats: - - - X.509 `subjectPublicKeyInfo` DER SEQUENCE (binary or PEM encoding) - - `PKCS#1`_ `RSAPublicKey` DER SEQUENCE (binary or PEM encoding) - - OpenSSH (textual public key only) - - An RSA private key can be in any of the following formats: - - - PKCS#1 `RSAPrivateKey` DER SEQUENCE (binary or PEM encoding) - - `PKCS#8`_ `PrivateKeyInfo` DER SEQUENCE (binary or PEM encoding) - - OpenSSH (textual public key only) - - For details about the PEM encoding, see `RFC1421`_/`RFC1423`_. - - In case of PEM encoding, the private key can be encrypted with DES or 3TDES according to a certain ``pass phrase``. - Only OpenSSL-compatible pass phrases are supported. - :Type externKey: string - - :Parameter passphrase: - In case of an encrypted PEM key, this is the pass phrase from which the encryption key is derived. - :Type passphrase: string - - :Return: An RSA key object (`_RSAobj`). - - :Raise ValueError/IndexError/TypeError: - When the given key cannot be parsed (possibly because the pass phrase is wrong). - - .. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt - .. _RFC1423: http://www.ietf.org/rfc/rfc1423.txt - .. _`PKCS#1`: http://www.ietf.org/rfc/rfc3447.txt - .. 
_`PKCS#8`: http://www.ietf.org/rfc/rfc5208.txt - """ - externKey = tobytes(externKey) - if passphrase is not None: - passphrase = tobytes(passphrase) - - if externKey.startswith(b('-----')): - # This is probably a PEM encoded key - lines = externKey.replace(b(" "),b('')).split() - keyobj = None - - # The encrypted PEM format - if lines[1].startswith(b('Proc-Type:4,ENCRYPTED')): - DEK = lines[2].split(b(':')) - if len(DEK)!=2 or DEK[0]!=b('DEK-Info') or not passphrase: - raise ValueError("PEM encryption format not supported.") - algo, salt = DEK[1].split(b(',')) - salt = binascii.a2b_hex(salt) - import Crypto.Hash.MD5 - from Crypto.Cipher import DES, DES3 - from Crypto.Protocol.KDF import PBKDF1 - if algo==b("DES-CBC"): - # This is EVP_BytesToKey in OpenSSL - key = PBKDF1(passphrase, salt, 8, 1, Crypto.Hash.MD5) - keyobj = DES.new(key, Crypto.Cipher.DES.MODE_CBC, salt) - elif algo==b("DES-EDE3-CBC"): - # Note that EVP_BytesToKey is note exactly the same as PBKDF1 - key = PBKDF1(passphrase, salt, 16, 1, Crypto.Hash.MD5) - key += PBKDF1(key+passphrase, salt, 8, 1, Crypto.Hash.MD5) - keyobj = DES3.new(key, Crypto.Cipher.DES3.MODE_CBC, salt) - else: - raise ValueError("Unsupport PEM encryption algorithm.") - lines = lines[2:] - - der = binascii.a2b_base64(b('').join(lines[1:-1])) - if keyobj: - der = keyobj.decrypt(der) - padding = bord(der[-1]) - der = der[:-padding] - return self._importKeyDER(der) - - if externKey.startswith(b('ssh-rsa ')): - # This is probably an OpenSSH key - keystring = binascii.a2b_base64(externKey.split(b(' '))[1]) - keyparts = [] - while len(keystring)>4: - l = struct.unpack(">I",keystring[:4])[0] - keyparts.append(keystring[4:4+l]) - keystring = keystring[4+l:] - e = bytes_to_long(keyparts[1]) - n = bytes_to_long(keyparts[2]) - return self.construct([n, e]) - if bord(externKey[0])==0x30: - # This is probably a DER encoded key - return self._importKeyDER(externKey) - - raise ValueError("RSA key format is not supported") - -#: This is the ASN.1 DER object that qualifies an algorithm as -#: compliant to PKCS#1 (that is, the standard RSA). -# It is found in all 'algorithm' fields (also called 'algorithmIdentifier'). -# It is a SEQUENCE with the oid assigned to RSA and with its parameters (none). -# 0x06 0x09 OBJECT IDENTIFIER, 9 bytes of payload -# 0x2A 0x86 0x48 0x86 0xF7 0x0D 0x01 0x01 0x01 -# rsaEncryption (1 2 840 113549 1 1 1) (PKCS #1) -# 0x05 0x00 NULL -algorithmIdentifier = DerSequence( - [ b('\x06\x09\x2A\x86\x48\x86\xF7\x0D\x01\x01\x01'), - DerNull().encode() ] - ).encode() - -_impl = RSAImplementation() -#: -#: Randomly generate a fresh, new RSA key object. -#: -#: See `RSAImplementation.generate`. -#: -generate = _impl.generate -#: -#: Construct an RSA key object from a tuple of valid RSA components. -#: -#: See `RSAImplementation.construct`. -#: -construct = _impl.construct -#: -#: Import an RSA key (public or private half), encoded in standard form. -#: -#: See `RSAImplementation.importKey`. 
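importKey() above accepts, among other formats, an X.509 SubjectPublicKeyInfo structure in PEM; pairing it with exportKey() on the public half gives a self-contained round trip (the key is freshly generated, nothing here comes from the repository):

    from Crypto.PublicKey import RSA

    key = RSA.generate(2048)

    pub_pem = key.publickey().exportKey('PEM')        # SubjectPublicKeyInfo, PEM-wrapped
    assert pub_pem.startswith(b'-----BEGIN PUBLIC KEY-----')

    pub = RSA.importKey(pub_pem)
    assert (pub.n, pub.e) == (key.n, key.e) and not pub.has_private()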
-#: -importKey = _impl.importKey -error = _impl.error - -# vim:set ts=4 sw=4 sts=4 expandtab: - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/_DSA.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/_DSA.py deleted file mode 100644 index 1787ced..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/_DSA.py +++ /dev/null @@ -1,115 +0,0 @@ - -# -# DSA.py : Digital Signature Algorithm -# -# Part of the Python Cryptography Toolkit -# -# Written by Andrew Kuchling, Paul Swartz, and others -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== -# - -__revision__ = "$Id$" - -from Crypto.PublicKey.pubkey import * -from Crypto.Util import number -from Crypto.Util.number import bytes_to_long, long_to_bytes -from Crypto.Hash import SHA -from Crypto.Util.py3compat import * - -class error (Exception): - pass - -def generateQ(randfunc): - S=randfunc(20) - hash1=SHA.new(S).digest() - hash2=SHA.new(long_to_bytes(bytes_to_long(S)+1)).digest() - q = bignum(0) - for i in range(0,20): - c=bord(hash1[i])^bord(hash2[i]) - if i==0: - c=c | 128 - if i==19: - c= c | 1 - q=q*256+c - while (not isPrime(q)): - q=q+2 - if pow(2,159) < q < pow(2,160): - return S, q - raise RuntimeError('Bad q value generated') - -def generate_py(bits, randfunc, progress_func=None): - """generate(bits:int, randfunc:callable, progress_func:callable) - - Generate a DSA key of length 'bits', using 'randfunc' to get - random data and 'progress_func', if present, to display - the progress of the key generation. 
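generate_py() above takes the optional progress_func callback described in the public DSA.generate() docstring earlier; the body that follows calls it with short labels such as 'p,q\n'. A sketch of wiring it up through DSA.generate(), with show_progress being a hypothetical helper name:

    import sys

    from Crypto.PublicKey import DSA

    def show_progress(label):
        # label is a short string such as 'p,q\n' naming the parameter being generated
        sys.stdout.write(label)

    key = DSA.generate(1024, progress_func=show_progress)
    print(key.p.bit_length())   # 1024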
- """ - - if bits<160: - raise ValueError('Key length < 160 bits') - obj=DSAobj() - # Generate string S and prime q - if progress_func: - progress_func('p,q\n') - while (1): - S, obj.q = generateQ(randfunc) - n=divmod(bits-1, 160)[0] - C, N, V = 0, 2, {} - b=(obj.q >> 5) & 15 - powb=pow(bignum(2), b) - powL1=pow(bignum(2), bits-1) - while C<4096: - for k in range(0, n+1): - V[k]=bytes_to_long(SHA.new(S+bstr(N)+bstr(k)).digest()) - W=V[n] % powb - for k in range(n-1, -1, -1): - W=(W<<160)+V[k] - X=W+powL1 - p=X-(X%(2*obj.q)-1) - if powL1<=p and isPrime(p): - break - C, N = C+1, N+n+1 - if C<4096: - break - if progress_func: - progress_func('4096 multiples failed\n') - - obj.p = p - power=divmod(p-1, obj.q)[0] - if progress_func: - progress_func('h,g\n') - while (1): - h=bytes_to_long(randfunc(bits)) % (p-1) - g=pow(h, power, p) - if 11: - break - obj.g=g - if progress_func: - progress_func('x,y\n') - while (1): - x=bytes_to_long(randfunc(20)) - if 0 < x < obj.q: - break - obj.x, obj.y = x, pow(g, x, p) - return obj - -class DSAobj: - pass - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/_RSA.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/_RSA.py deleted file mode 100644 index 601ab7c..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/_RSA.py +++ /dev/null @@ -1,81 +0,0 @@ -# -# RSA.py : RSA encryption/decryption -# -# Part of the Python Cryptography Toolkit -# -# Written by Andrew Kuchling, Paul Swartz, and others -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== -# - -__revision__ = "$Id$" - -from Crypto.PublicKey import pubkey -from Crypto.Util import number - -def generate_py(bits, randfunc, progress_func=None, e=65537): - """generate(bits:int, randfunc:callable, progress_func:callable, e:int) - - Generate an RSA key of length 'bits', public exponent 'e'(which must be - odd), using 'randfunc' to get random data and 'progress_func', - if present, to display the progress of the key generation. - """ - obj=RSAobj() - obj.e = int(e) - - # Generate the prime factors of n - if progress_func: - progress_func('p,q\n') - p = q = 1 - while number.size(p*q) < bits: - # Note that q might be one bit longer than p if somebody specifies an odd - # number of bits for the key. (Why would anyone do that? You don't get - # more security.) - p = pubkey.getStrongPrime(bits>>1, obj.e, 1e-12, randfunc) - q = pubkey.getStrongPrime(bits - (bits>>1), obj.e, 1e-12, randfunc) - - # It's OK for p to be larger than q, but let's be - # kind to the function that will invert it for - # th calculation of u. 
- if p > q: - (p, q)=(q, p) - obj.p = p - obj.q = q - - if progress_func: - progress_func('u\n') - obj.u = pubkey.inverse(obj.p, obj.q) - obj.n = obj.p*obj.q - - if progress_func: - progress_func('d\n') - obj.d=pubkey.inverse(obj.e, (obj.p-1)*(obj.q-1)) - - assert bits <= 1+obj.size(), "Generated key is too small" - - return obj - -class RSAobj(pubkey.pubkey): - - def size(self): - """size() : int - Return the maximum number of bits that can be handled by this key. - """ - return number.size(self.n) - 1 - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/__init__.py deleted file mode 100644 index 503809f..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/__init__.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Public-key encryption and signature algorithms. - -Public-key encryption uses two different keys, one for encryption and -one for decryption. The encryption key can be made public, and the -decryption key is kept private. Many public-key algorithms can also -be used to sign messages, and some can *only* be used for signatures. - -======================== ============================================= -Module Description -======================== ============================================= -Crypto.PublicKey.DSA Digital Signature Algorithm (Signature only) -Crypto.PublicKey.ElGamal (Signing and encryption) -Crypto.PublicKey.RSA (Signing, encryption, and blinding) -======================== ============================================= - -:undocumented: _DSA, _RSA, _fastmath, _slowmath, pubkey -""" - -__all__ = ['RSA', 'DSA', 'ElGamal'] -__revision__ = "$Id$" - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/_slowmath.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/_slowmath.py deleted file mode 100644 index c87bdd2..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/PublicKey/_slowmath.py +++ /dev/null @@ -1,187 +0,0 @@ -# -*- coding: utf-8 -*- -# -# PubKey/RSA/_slowmath.py : Pure Python implementation of the RSA portions of _fastmath -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. 
To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Pure Python implementation of the RSA-related portions of Crypto.PublicKey._fastmath.""" - -__revision__ = "$Id$" - -__all__ = ['rsa_construct'] - -import sys - -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * -from Crypto.Util.number import size, inverse, GCD - -class error(Exception): - pass - -class _RSAKey(object): - def _blind(self, m, r): - # compute r**e * m (mod n) - return m * pow(r, self.e, self.n) - - def _unblind(self, m, r): - # compute m / r (mod n) - return inverse(r, self.n) * m % self.n - - def _decrypt(self, c): - # compute c**d (mod n) - if not self.has_private(): - raise TypeError("No private key") - if (hasattr(self,'p') and hasattr(self,'q') and hasattr(self,'u')): - m1 = pow(c, self.d % (self.p-1), self.p) - m2 = pow(c, self.d % (self.q-1), self.q) - h = m2 - m1 - if (h<0): - h = h + self.q - h = h*self.u % self.q - return h*self.p+m1 - return pow(c, self.d, self.n) - - def _encrypt(self, m): - # compute m**d (mod n) - return pow(m, self.e, self.n) - - def _sign(self, m): # alias for _decrypt - if not self.has_private(): - raise TypeError("No private key") - return self._decrypt(m) - - def _verify(self, m, sig): - return self._encrypt(sig) == m - - def has_private(self): - return hasattr(self, 'd') - - def size(self): - """Return the maximum number of bits that can be encrypted""" - return size(self.n) - 1 - -def rsa_construct(n, e, d=None, p=None, q=None, u=None): - """Construct an RSAKey object""" - assert isinstance(n, int) - assert isinstance(e, int) - assert isinstance(d, (int, type(None))) - assert isinstance(p, (int, type(None))) - assert isinstance(q, (int, type(None))) - assert isinstance(u, (int, type(None))) - obj = _RSAKey() - obj.n = n - obj.e = e - if d is None: - return obj - obj.d = d - if p is not None and q is not None: - obj.p = p - obj.q = q - else: - # Compute factors p and q from the private exponent d. - # We assume that n has no more than two factors. - # See 8.2.2(i) in Handbook of Applied Cryptography. - ktot = d*e-1 - # The quantity d*e-1 is a multiple of phi(n), even, - # and can be represented as t*2^s. - t = ktot - while t%2==0: - t=divmod(t,2)[0] - # Cycle through all multiplicative inverses in Zn. - # The algorithm is non-deterministic, but there is a 50% chance - # any candidate a leads to successful factoring. - # See "Digitalized Signatures and Public Key Functions as Intractable - # as Factorization", M. 
Rabin, 1979 - spotted = 0 - a = 2 - while not spotted and a<100: - k = t - # Cycle through all values a^{t*2^i}=a^k - while k -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -__revision__ = "$Id$" - -import sys -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * -from Crypto.Util.py3compat import * - -from binascii import b2a_hex -import time -import warnings - -from Crypto.pct_warnings import ClockRewindWarning -from . import SHAd256 - -from . import FortunaGenerator - -class FortunaPool(object): - """Fortuna pool type - - This object acts like a hash object, with the following differences: - - - It keeps a count (the .length attribute) of the number of bytes that - have been added to the pool - - It supports a .reset() method for in-place reinitialization - - The method to add bytes to the pool is .append(), not .update(). - """ - - digest_size = SHAd256.digest_size - - def __init__(self): - self.reset() - - def append(self, data): - self._h.update(data) - self.length += len(data) - - def digest(self): - return self._h.digest() - - def hexdigest(self): - if sys.version_info[0] == 2: - return b2a_hex(self.digest()) - else: - return b2a_hex(self.digest()).decode() - - def reset(self): - self._h = SHAd256.new() - self.length = 0 - -def which_pools(r): - """Return a list of pools indexes (in range(32)) that are to be included during reseed number r. - - According to _Practical Cryptography_, chapter 10.5.2 "Pools": - - "Pool P_i is included if 2**i is a divisor of r. Thus P_0 is used - every reseed, P_1 every other reseed, P_2 every fourth reseed, etc." - """ - # This is a separate function so that it can be unit-tested. - assert r >= 1 - retval = [] - mask = 0 - for i in range(32): - # "Pool P_i is included if 2**i is a divisor of [reseed_count]" - if (r & mask) == 0: - retval.append(i) - else: - break # optimization. once this fails, it always fails - mask = (mask << 1) | 1 - return retval - -class FortunaAccumulator(object): - - # An estimate of how many bytes we must append to pool 0 before it will - # contain 128 bits of entropy (with respect to an attack). We reseed the - # generator only after pool 0 contains `min_pool_size` bytes. Note that - # unlike with some other PRNGs, Fortuna's security does not rely on the - # accuracy of this estimate---we can accord to be optimistic here. - min_pool_size = 64 # size in bytes - - # If an attacker can predict some (but not all) of our entropy sources, the - # `min_pool_size` check may not be sufficient to prevent a successful state - # compromise extension attack. 
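Aside: the which_pools helper above encodes the Fortuna pool schedule quoted from _Practical Cryptography_: pool P_i takes part in reseed number r exactly when 2**i divides r. The same rule can be written and checked standalone, independent of the deleted module:

    def pools_for_reseed(r: int) -> list:
        """Indexes i in range(32) with 2**i dividing r (same rule as which_pools above)."""
        assert r >= 1
        return [i for i in range(32) if r % (2 ** i) == 0]

    for r in range(1, 9):
        print(r, pools_for_reseed(r))
    # 1 [0]
    # 2 [0, 1]
    # 3 [0]
    # 4 [0, 1, 2]
    # 5 [0]
    # 6 [0, 1]
    # 7 [0]
    # 8 [0, 1, 2, 3]

Pool 0 is drained on every reseed, pool 1 on every second, and so on, which is why an attacker who can observe or influence the fast pools still cannot predict the slower ones.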
To resist this attack, Fortuna spreads the - # input across 32 pools, which are then consumed (to reseed the output - # generator) with exponentially decreasing frequency. - # - # In order to prevent an attacker from gaining knowledge of all 32 pools - # before we have a chance to fill them with enough information that the - # attacker cannot predict, we impose a rate limit of 10 reseeds/second (one - # per 100 ms). This ensures that a hypothetical 33rd pool would only be - # needed after a minimum of 13 years of sustained attack. - reseed_interval = 0.100 # time in seconds - - def __init__(self): - self.reseed_count = 0 - self.generator = FortunaGenerator.AESGenerator() - self.last_reseed = None - - # Initialize 32 FortunaPool instances. - # NB: This is _not_ equivalent to [FortunaPool()]*32, which would give - # us 32 references to the _same_ FortunaPool instance (and cause the - # assertion below to fail). - self.pools = [FortunaPool() for i in range(32)] # 32 pools - assert(self.pools[0] is not self.pools[1]) - - def _forget_last_reseed(self): - # This is not part of the standard Fortuna definition, and using this - # function frequently can weaken Fortuna's ability to resist a state - # compromise extension attack, but we need this in order to properly - # implement Crypto.Random.atfork(). Otherwise, forked child processes - # might continue to use their parent's PRNG state for up to 100ms in - # some cases. (e.g. CVE-2013-1445) - self.last_reseed = None - - def random_data(self, bytes): - current_time = time.time() - if (self.last_reseed is not None and self.last_reseed > current_time): # Avoid float comparison to None to make Py3k happy - warnings.warn("Clock rewind detected. Resetting last_reseed.", ClockRewindWarning) - self.last_reseed = None - if (self.pools[0].length >= self.min_pool_size and - (self.last_reseed is None or - current_time > self.last_reseed + self.reseed_interval)): - self._reseed(current_time) - # The following should fail if we haven't seeded the pool yet. - return self.generator.pseudo_random_data(bytes) - - def _reseed(self, current_time=None): - if current_time is None: - current_time = time.time() - seed = [] - self.reseed_count += 1 - self.last_reseed = current_time - for i in which_pools(self.reseed_count): - seed.append(self.pools[i].digest()) - self.pools[i].reset() - - seed = b("").join(seed) - self.generator.reseed(seed) - - def add_random_event(self, source_number, pool_number, data): - assert 1 <= len(data) <= 32 - assert 0 <= source_number <= 255 - assert 0 <= pool_number <= 31 - self.pools[pool_number].append(bchr(source_number)) - self.pools[pool_number].append(bchr(len(data))) - self.pools[pool_number].append(data) - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/Fortuna/FortunaGenerator.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/Fortuna/FortunaGenerator.py deleted file mode 100644 index 489c81e..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/Fortuna/FortunaGenerator.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: ascii -*- -# -# FortunaGenerator.py : Fortuna's internal PRNG -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. 
To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -__revision__ = "$Id$" - -import sys -if sys.version_info[0] is 2 and sys.version_info[1] is 1: - from Crypto.Util.py21compat import * -from Crypto.Util.py3compat import * - -import struct - -from Crypto.Util.number import ceil_shift, exact_log2, exact_div -from Crypto.Util import Counter -from Crypto.Cipher import AES - -from . import SHAd256 - -class AESGenerator(object): - """The Fortuna "generator" - - This is used internally by the Fortuna PRNG to generate arbitrary amounts - of pseudorandom data from a smaller amount of seed data. - - The output is generated by running AES-256 in counter mode and re-keying - after every mebibyte (2**16 blocks) of output. - """ - - block_size = AES.block_size # output block size in octets (128 bits) - key_size = 32 # key size in octets (256 bits) - - # Because of the birthday paradox, we expect to find approximately one - # collision for every 2**64 blocks of output from a real random source. - # However, this code generates pseudorandom data by running AES in - # counter mode, so there will be no collisions until the counter - # (theoretically) wraps around at 2**128 blocks. Thus, in order to prevent - # Fortuna's pseudorandom output from deviating perceptibly from a true - # random source, Ferguson and Schneier specify a limit of 2**16 blocks - # without rekeying. 
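Aside: the AESGenerator described above combines AES-256 in counter mode (128-bit little-endian counter), a key schedule of new_key = SHAd256(old_key || seed), and a forced rekey after every request so that a later state compromise cannot reveal earlier output. A compressed sketch of that contract is below; it assumes the PyCryptodome package for the AES primitive, uses an illustrative class name, and omits the 2**16-blocks-per-request cap, so it is an illustration of the scheme rather than a replacement for the deleted class:

    import hashlib
    from Crypto.Cipher import AES   # assumes PyCryptodome is installed

    def shad256(data: bytes) -> bytes:
        """SHA-256 applied twice, the SHAd256 construction used for rekeying."""
        return hashlib.sha256(hashlib.sha256(data).digest()).digest()

    class TinyFortunaGenerator:
        """Illustrative AES-256-CTR generator with rekey-after-every-request."""

        def __init__(self):
            self.key = None
            self.counter = 0            # 128-bit counter, serialized little-endian

        def reseed(self, seed: bytes) -> None:
            old = self.key if self.key is not None else b"\0" * 32
            self.key = shad256(old + seed)
            self.counter += 1           # the counter never resets

        def _blocks(self, n: int) -> bytes:
            # CTR mode done by hand: ECB-encrypt successive counter values.
            cipher = AES.new(self.key, AES.MODE_ECB)
            out = bytearray()
            for _ in range(n):
                out += cipher.encrypt(self.counter.to_bytes(16, "little"))
                self.counter += 1
            return bytes(out)

        def pseudo_random_data(self, nbytes: int) -> bytes:
            if self.key is None:
                raise RuntimeError("generator must be seeded before use")
            data = self._blocks((nbytes + 15) // 16)[:nbytes]
            # Switch to a fresh key so a later compromise cannot reveal this output.
            self.key = self._blocks(2)
            return data

    g = TinyFortunaGenerator()
    g.reseed(b"some seed material")
    print(g.pseudo_random_data(32).hex())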
- max_blocks_per_request = 2**16 # Allow no more than this number of blocks per _pseudo_random_data request - - _four_kiblocks_of_zeros = b("\0") * block_size * 4096 - - def __init__(self): - self.counter = Counter.new(nbits=self.block_size*8, initial_value=0, little_endian=True) - self.key = None - - # Set some helper constants - self.block_size_shift = exact_log2(self.block_size) - assert (1 << self.block_size_shift) == self.block_size - - self.blocks_per_key = exact_div(self.key_size, self.block_size) - assert self.key_size == self.blocks_per_key * self.block_size - - self.max_bytes_per_request = self.max_blocks_per_request * self.block_size - - def reseed(self, seed): - if self.key is None: - self.key = b("\0") * self.key_size - - self._set_key(SHAd256.new(self.key + seed).digest()) - self.counter() # increment counter - assert len(self.key) == self.key_size - - def pseudo_random_data(self, bytes): - assert bytes >= 0 - - num_full_blocks = bytes >> 20 - remainder = bytes & ((1<<20)-1) - - retval = [] - for i in range(num_full_blocks): - retval.append(self._pseudo_random_data(1<<20)) - retval.append(self._pseudo_random_data(remainder)) - - return b("").join(retval) - - def _set_key(self, key): - self.key = key - self._cipher = AES.new(key, AES.MODE_CTR, counter=self.counter) - - def _pseudo_random_data(self, bytes): - if not (0 <= bytes <= self.max_bytes_per_request): - raise AssertionError("You cannot ask for more than 1 MiB of data per request") - - num_blocks = ceil_shift(bytes, self.block_size_shift) # num_blocks = ceil(bytes / self.block_size) - - # Compute the output - retval = self._generate_blocks(num_blocks)[:bytes] - - # Switch to a new key to avoid later compromises of this output (i.e. - # state compromise extension attacks) - self._set_key(self._generate_blocks(self.blocks_per_key)) - - assert len(retval) == bytes - assert len(self.key) == self.key_size - - return retval - - def _generate_blocks(self, num_blocks): - if self.key is None: - raise AssertionError("generator must be seeded before use") - assert 0 <= num_blocks <= self.max_blocks_per_request - retval = [] - for i in range(num_blocks >> 12): # xrange(num_blocks / 4096) - retval.append(self._cipher.encrypt(self._four_kiblocks_of_zeros)) - remaining_bytes = (num_blocks & 4095) << self.block_size_shift # (num_blocks % 4095) * self.block_size - retval.append(self._cipher.encrypt(self._four_kiblocks_of_zeros[:remaining_bytes])) - return b("").join(retval) - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/Fortuna/SHAd256.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/Fortuna/SHAd256.py deleted file mode 100644 index 2e135c9..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/Fortuna/SHAd256.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: ascii -*- -# -# Random/Fortuna/SHAd256.py : SHA_d-256 hash function implementation -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""\ -SHA_d-256 hash function implementation. - -This module should comply with PEP 247. -""" - -__revision__ = "$Id$" -__all__ = ['new', 'digest_size'] - -import sys -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * -from Crypto.Util.py3compat import * - -from binascii import b2a_hex - -from Crypto.Hash import SHA256 - -assert SHA256.digest_size == 32 - -class _SHAd256(object): - """SHA-256, doubled. - - Returns SHA-256(SHA-256(data)). - """ - - digest_size = SHA256.digest_size - - _internal = object() - - def __init__(self, internal_api_check, sha256_hash_obj): - if internal_api_check is not self._internal: - raise AssertionError("Do not instantiate this class directly. Use %s.new()" % (__name__,)) - self._h = sha256_hash_obj - - # PEP 247 "copy" method - def copy(self): - """Return a copy of this hashing object""" - return _SHAd256(SHAd256._internal, self._h.copy()) - - # PEP 247 "digest" method - def digest(self): - """Return the hash value of this object as a binary string""" - retval = SHA256.new(self._h.digest()).digest() - assert len(retval) == 32 - return retval - - # PEP 247 "hexdigest" method - def hexdigest(self): - """Return the hash value of this object as a (lowercase) hexadecimal string""" - retval = b2a_hex(self.digest()) - assert len(retval) == 64 - if sys.version_info[0] == 2: - return retval - else: - return retval.decode() - - # PEP 247 "update" method - def update(self, data): - self._h.update(data) - -# PEP 247 module-level "digest_size" variable -digest_size = _SHAd256.digest_size - -# PEP 247 module-level "new" function -def new(data=None): - """Return a new SHAd256 hashing object""" - if not data: - data=b("") - sha = _SHAd256(_SHAd256._internal, SHA256.new(data)) - sha.new = globals()['new'] - return sha - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/Fortuna/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/Fortuna/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/OSRNG/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/OSRNG/__init__.py deleted file mode 100644 index 2fbbecb..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/OSRNG/__init__.py +++ /dev/null @@ -1,40 +0,0 @@ -# -# Random/OSRNG/__init__.py : Platform-independent OS RNG API -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Provides a platform-independent interface to the random number generators -supplied by various operating systems.""" - -__revision__ = "$Id$" - -import os - -if os.name == 'posix': - from Crypto.Random.OSRNG.posix import new -elif os.name == 'nt': - from Crypto.Random.OSRNG.nt import new -elif hasattr(os, 'urandom'): - from Crypto.Random.OSRNG.fallback import new -else: - raise ImportError("Not implemented") - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/OSRNG/fallback.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/OSRNG/fallback.py deleted file mode 100644 index 6d4130d..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/OSRNG/fallback.py +++ /dev/null @@ -1,46 +0,0 @@ -# -# Random/OSRNG/fallback.py : Fallback entropy source for systems with os.urandom -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - - -__revision__ = "$Id$" -__all__ = ['PythonOSURandomRNG'] - -import os - -from .rng_base import BaseRNG - -class PythonOSURandomRNG(BaseRNG): - - name = "" - - def __init__(self): - self._read = os.urandom - BaseRNG.__init__(self) - - def _close(self): - self._read = None - -def new(*args, **kwargs): - return PythonOSURandomRNG(*args, **kwargs) - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/OSRNG/posix.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/OSRNG/posix.py deleted file mode 100644 index ceea7b7..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/OSRNG/posix.py +++ /dev/null @@ -1,86 +0,0 @@ -# -# Random/OSRNG/posix.py : OS entropy source for POSIX systems -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. 
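Aside: the fallback path above simply wraps os.urandom when no dedicated /dev/urandom or Windows CryptoAPI reader applies. On current Python versions the same cross-platform behaviour is available directly from the standard library (a two-line equivalent of the fallback path only, not of the full OSRNG interface):

    import os
    import secrets

    entropy = os.urandom(32)           # what PythonOSURandomRNG._read wraps
    token = secrets.token_bytes(32)    # the stdlib's higher-level equivalent today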
To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - - -__revision__ = "$Id$" -__all__ = ['DevURandomRNG'] - -import errno -import os -import stat - -from .rng_base import BaseRNG -from Crypto.Util.py3compat import b - -class DevURandomRNG(BaseRNG): - - def __init__(self, devname=None): - if devname is None: - self.name = "/dev/urandom" - else: - self.name = devname - - # Test that /dev/urandom is a character special device - f = open(self.name, "rb", 0) - fmode = os.fstat(f.fileno())[stat.ST_MODE] - if not stat.S_ISCHR(fmode): - f.close() - raise TypeError("%r is not a character special device" % (self.name,)) - - self.__file = f - - BaseRNG.__init__(self) - - def _close(self): - self.__file.close() - - def _read(self, N): - # Starting with Python 3 open with buffering=0 returns a FileIO object. - # FileIO.read behaves like read(2) and not like fread(3) and thus we - # have to handle the case that read returns less data as requested here - # more carefully. - data = b("") - while len(data) < N: - try: - d = self.__file.read(N - len(data)) - except IOError as e: - # read(2) has been interrupted by a signal; redo the read - if e.errno == errno.EINTR: - continue - raise - - if d is None: - # __file is in non-blocking mode and no data is available - return data - if len(d) == 0: - # __file is in blocking mode and arrived at EOF - return data - - data += d - return data - -def new(*args, **kwargs): - return DevURandomRNG(*args, **kwargs) - - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/OSRNG/rng_base.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/OSRNG/rng_base.py deleted file mode 100644 index 546f2e9..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/OSRNG/rng_base.py +++ /dev/null @@ -1,88 +0,0 @@ -# -# Random/OSRNG/rng_base.py : Base class for OSRNG -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
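Aside: DevURandomRNG._read above loops because an unbuffered FileIO.read follows read(2) semantics and may return fewer bytes than requested, or be interrupted by a signal. The same "read exactly N bytes" pattern, written standalone (the helper name is illustrative; Python 3.5+ normally retries EINTR by itself, the except clause is kept only for parity with the original):

    def read_exact(f, n: int) -> bytes:
        """Read exactly n bytes from an unbuffered binary file object, stopping early only at EOF."""
        chunks = []
        remaining = n
        while remaining > 0:
            try:
                chunk = f.read(remaining)
            except InterruptedError:    # EINTR: redo the read, as posix.py does
                continue
            if not chunk:               # EOF, or no data on a non-blocking fd
                break
            chunks.append(chunk)
            remaining -= len(chunk)
        return b"".join(chunks)

    # Example usage (POSIX): read 32 bytes of OS entropy the way posix.py does.
    with open("/dev/urandom", "rb", buffering=0) as f:
        print(read_exact(f, 32).hex())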
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -__revision__ = "$Id$" - -import sys -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * - -class BaseRNG(object): - - def __init__(self): - self.closed = False - self._selftest() - - def __del__(self): - self.close() - - def _selftest(self): - # Test that urandom can return data - data = self.read(16) - if len(data) != 16: - raise AssertionError("read truncated") - - # Test that we get different data every time (if we don't, the RNG is - # probably malfunctioning) - data2 = self.read(16) - if data == data2: - raise AssertionError("OS RNG returned duplicate data") - - # PEP 343: Support for the "with" statement - def __enter__(self): - pass - def __exit__(self): - """PEP 343 support""" - self.close() - - def close(self): - if not self.closed: - self._close() - self.closed = True - - def flush(self): - pass - - def read(self, N=-1): - """Return N bytes from the RNG.""" - if self.closed: - raise ValueError("I/O operation on closed file") - if not isinstance(N, int): - raise TypeError("an integer is required") - if N < 0: - raise ValueError("cannot read to end of infinite stream") - elif N == 0: - return "" - data = self._read(N) - if len(data) != N: - raise AssertionError("%s produced truncated output (requested %d, got %d)" % (self.name, N, len(data))) - return data - - def _close(self): - raise NotImplementedError("child class must implement this") - - def _read(self, N): - raise NotImplementedError("child class must implement this") - - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/_UserFriendlyRNG.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/_UserFriendlyRNG.py deleted file mode 100644 index 937c17d..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/_UserFriendlyRNG.py +++ /dev/null @@ -1,230 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Random/_UserFriendlyRNG.py : A user-friendly random number generator -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -__revision__ = "$Id$" - -import sys -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * - -import os -import threading -import struct -import time -from math import floor - -from Crypto.Random import OSRNG -from Crypto.Random.Fortuna import FortunaAccumulator - -class _EntropySource(object): - def __init__(self, accumulator, src_num): - self._fortuna = accumulator - self._src_num = src_num - self._pool_num = 0 - - def feed(self, data): - self._fortuna.add_random_event(self._src_num, self._pool_num, data) - self._pool_num = (self._pool_num + 1) & 31 - -class _EntropyCollector(object): - - def __init__(self, accumulator): - self._osrng = OSRNG.new() - self._osrng_es = _EntropySource(accumulator, 255) - self._time_es = _EntropySource(accumulator, 254) - self._clock_es = _EntropySource(accumulator, 253) - - def reinit(self): - # Add 256 bits to each of the 32 pools, twice. (For a total of 16384 - # bits collected from the operating system.) - for i in range(2): - block = self._osrng.read(32*32) - for p in range(32): - self._osrng_es.feed(block[p*32:(p+1)*32]) - block = None - self._osrng.flush() - - def collect(self): - # Collect 64 bits of entropy from the operating system and feed it to Fortuna. - self._osrng_es.feed(self._osrng.read(8)) - - # Add the fractional part of time.time() - t = time.time() - self._time_es.feed(struct.pack("@I", int(2**30 * (t - floor(t))))) - - # Add the fractional part of time.clock() - t = time.clock() - self._clock_es.feed(struct.pack("@I", int(2**30 * (t - floor(t))))) - - -class _UserFriendlyRNG(object): - - def __init__(self): - self.closed = False - self._fa = FortunaAccumulator.FortunaAccumulator() - self._ec = _EntropyCollector(self._fa) - self.reinit() - - def reinit(self): - """Initialize the random number generator and seed it with entropy from - the operating system. - """ - - # Save the pid (helps ensure that Crypto.Random.atfork() gets called) - self._pid = os.getpid() - - # Collect entropy from the operating system and feed it to - # FortunaAccumulator - self._ec.reinit() - - # Override FortunaAccumulator's 100ms minimum re-seed interval. This - # is necessary to avoid a race condition between this function and - # self.read(), which that can otherwise cause forked child processes to - # produce identical output. (e.g. CVE-2013-1445) - # - # Note that if this function can be called frequently by an attacker, - # (and if the bits from OSRNG are insufficiently random) it will weaken - # Fortuna's ability to resist a state compromise extension attack. - self._fa._forget_last_reseed() - - def close(self): - self.closed = True - self._osrng = None - self._fa = None - - def flush(self): - pass - - def read(self, N): - """Return N bytes from the RNG.""" - if self.closed: - raise ValueError("I/O operation on closed file") - if not isinstance(N, int): - raise TypeError("an integer is required") - if N < 0: - raise ValueError("cannot read to end of infinite stream") - - # Collect some entropy and feed it to Fortuna - self._ec.collect() - - # Ask Fortuna to generate some bytes - retval = self._fa.random_data(N) - - # Check that we haven't forked in the meantime. (If we have, we don't - # want to use the data, because it might have been duplicated in the - # parent process. - self._check_pid() - - # Return the random data. 
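Aside: the read() path above does three things in sequence: feed fresh entropy to Fortuna, generate the requested bytes, then verify the process id so a fork()ed child does not silently reuse its parent's PRNG state (the CVE-2013-1445 class of bug). The deleted code raises and asks the caller to invoke Random.atfork(); the sketch below shows the pid-guard pattern on its own and reseeds automatically instead. The class is hypothetical and uses os.urandom as a stand-in for the Fortuna machinery:

    import os

    class ForkGuardedPRNG:
        """Illustrative guard: reinitialize state when a fork is detected."""

        def __init__(self):
            self._reseed_from_os()

        def _reseed_from_os(self):
            self._pid = os.getpid()
            self._state = os.urandom(32)      # stand-in for Fortuna's reseed

        def read(self, n: int) -> bytes:
            if os.getpid() != self._pid:
                # A fork happened since the last call: the child must not reuse
                # the parent's state, so reinitialize before producing output.
                self._reseed_from_os()
            return os.urandom(n)              # stand-in for generator output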
- return retval - - def _check_pid(self): - # Lame fork detection to remind developers to invoke Random.atfork() - # after every call to os.fork(). Note that this check is not reliable, - # since process IDs can be reused on most operating systems. - # - # You need to do Random.atfork() in the child process after every call - # to os.fork() to avoid reusing PRNG state. If you want to avoid - # leaking PRNG state to child processes (for example, if you are using - # os.setuid()) then you should also invoke Random.atfork() in the - # *parent* process. - if os.getpid() != self._pid: - raise AssertionError("PID check failed. RNG must be re-initialized after fork(). Hint: Try Random.atfork()") - - -class _LockingUserFriendlyRNG(_UserFriendlyRNG): - def __init__(self): - self._lock = threading.Lock() - _UserFriendlyRNG.__init__(self) - - def close(self): - self._lock.acquire() - try: - return _UserFriendlyRNG.close(self) - finally: - self._lock.release() - - def reinit(self): - self._lock.acquire() - try: - return _UserFriendlyRNG.reinit(self) - finally: - self._lock.release() - - def read(self, bytes): - self._lock.acquire() - try: - return _UserFriendlyRNG.read(self, bytes) - finally: - self._lock.release() - -class RNGFile(object): - def __init__(self, singleton): - self.closed = False - self._singleton = singleton - - # PEP 343: Support for the "with" statement - def __enter__(self): - """PEP 343 support""" - def __exit__(self): - """PEP 343 support""" - self.close() - - def close(self): - # Don't actually close the singleton, just close this RNGFile instance. - self.closed = True - self._singleton = None - - def read(self, bytes): - if self.closed: - raise ValueError("I/O operation on closed file") - return self._singleton.read(bytes) - - def flush(self): - if self.closed: - raise ValueError("I/O operation on closed file") - -_singleton_lock = threading.Lock() -_singleton = None -def _get_singleton(): - global _singleton - _singleton_lock.acquire() - try: - if _singleton is None: - _singleton = _LockingUserFriendlyRNG() - return _singleton - finally: - _singleton_lock.release() - -def new(): - return RNGFile(_get_singleton()) - -def reinit(): - _get_singleton().reinit() - -def get_random_bytes(n): - """Return the specified number of cryptographically-strong random bytes.""" - return _get_singleton().read(n) - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/__init__.py deleted file mode 100644 index 659ffee..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/__init__.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Random/__init__.py : PyCrypto random number generation -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -__revision__ = "$Id$" -__all__ = ['new'] - -from Crypto.Random import OSRNG -from Crypto.Random import _UserFriendlyRNG - -def new(*args, **kwargs): - """Return a file-like object that outputs cryptographically random bytes.""" - return _UserFriendlyRNG.new(*args, **kwargs) - -def atfork(): - """Call this whenever you call os.fork()""" - _UserFriendlyRNG.reinit() - -def get_random_bytes(n): - """Return the specified number of cryptographically-strong random bytes.""" - return _UserFriendlyRNG.get_random_bytes(n) - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/random.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/random.py deleted file mode 100644 index cd9a221..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Random/random.py +++ /dev/null @@ -1,142 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Random/random.py : Strong alternative for the standard 'random' module -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
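Aside: the public surface of the Crypto.Random package deleted above is small. Typical use, with PyCrypto or a compatible replacement such as PyCryptodome installed, follows directly from the docstrings shown:

    from Crypto import Random

    rng = Random.new()                  # file-like object yielding random bytes
    session_key = rng.read(16)
    iv = Random.get_random_bytes(16)

    # After os.fork(), each process should call Random.atfork() so that parent
    # and child stop sharing PRNG state.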
-# =================================================================== - -"""A cryptographically strong version of Python's standard "random" module.""" - -__revision__ = "$Id$" -__all__ = ['StrongRandom', 'getrandbits', 'randrange', 'randint', 'choice', 'shuffle', 'sample'] - -from Crypto import Random -import sys -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * - -class StrongRandom(object): - def __init__(self, rng=None, randfunc=None): - if randfunc is None and rng is None: - self._randfunc = None - elif randfunc is not None and rng is None: - self._randfunc = randfunc - elif randfunc is None and rng is not None: - self._randfunc = rng.read - else: - raise ValueError("Cannot specify both 'rng' and 'randfunc'") - - def getrandbits(self, k): - """Return a python long integer with k random bits.""" - if self._randfunc is None: - self._randfunc = Random.new().read - mask = (1 << k) - 1 - return mask & bytes_to_long(self._randfunc(ceil_div(k, 8))) - - def randrange(self, *args): - """randrange([start,] stop[, step]): - Return a randomly-selected element from range(start, stop, step).""" - if len(args) == 3: - (start, stop, step) = args - elif len(args) == 2: - (start, stop) = args - step = 1 - elif len(args) == 1: - (stop,) = args - start = 0 - step = 1 - else: - raise TypeError("randrange expected at most 3 arguments, got %d" % (len(args),)) - if (not isinstance(start, int) - or not isinstance(stop, int) - or not isinstance(step, int)): - raise TypeError("randrange requires integer arguments") - if step == 0: - raise ValueError("randrange step argument must not be zero") - - num_choices = ceil_div(stop - start, step) - if num_choices < 0: - num_choices = 0 - if num_choices < 1: - raise ValueError("empty range for randrange(%r, %r, %r)" % (start, stop, step)) - - # Pick a random number in the range of possible numbers - r = num_choices - while r >= num_choices: - r = self.getrandbits(size(num_choices)) - - return start + (step * r) - - def randint(self, a, b): - """Return a random integer N such that a <= N <= b.""" - if not isinstance(a, int) or not isinstance(b, int): - raise TypeError("randint requires integer arguments") - N = self.randrange(a, b+1) - assert a <= N <= b - return N - - def choice(self, seq): - """Return a random element from a (non-empty) sequence. - - If the seqence is empty, raises IndexError. - """ - if len(seq) == 0: - raise IndexError("empty sequence") - return seq[self.randrange(len(seq))] - - def shuffle(self, x): - """Shuffle the sequence in place.""" - # Make a (copy) of the list of objects we want to shuffle - items = list(x) - - # Choose a random item (without replacement) until all the items have been - # chosen. 
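Aside: randrange above avoids modulo bias the standard way: draw getrandbits(size(num_choices)) and reject any value >= num_choices until one lands in range. The same idea in a few self-contained lines, using os.urandom rather than the module's Fortuna-backed reader (function names are illustrative):

    import os

    def getrandbits(k: int) -> int:
        """k random bits from the OS RNG (mirrors StrongRandom.getrandbits above)."""
        nbytes = (k + 7) // 8
        return int.from_bytes(os.urandom(nbytes), "big") & ((1 << k) - 1)

    def randrange_unbiased(num_choices: int) -> int:
        """Uniform integer in range(num_choices) via rejection sampling."""
        k = num_choices.bit_length()
        r = num_choices
        while r >= num_choices:         # reject out-of-range draws; no modulo bias
            r = getrandbits(k)
        return r

    print(randrange_unbiased(10))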
- for i in range(len(x)): - x[i] = items.pop(self.randrange(len(items))) - - def sample(self, population, k): - """Return a k-length list of unique elements chosen from the population sequence.""" - - num_choices = len(population) - if k > num_choices: - raise ValueError("sample larger than population") - - retval = [] - selected = {} # we emulate a set using a dict here - for i in range(k): - r = None - while r is None or r in selected: - r = self.randrange(num_choices) - retval.append(population[r]) - selected[r] = 1 - return retval - -_r = StrongRandom() -getrandbits = _r.getrandbits -randrange = _r.randrange -randint = _r.randint -choice = _r.choice -shuffle = _r.shuffle -sample = _r.sample - -# These are at the bottom to avoid problems with recursive imports -from Crypto.Util.number import ceil_div, bytes_to_long, long_to_bytes, size - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/__init__.py deleted file mode 100644 index 63e9c57..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/__init__.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/__init__.py: Self-test for cipher modules -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test for cipher modules""" - -__revision__ = "$Id$" - -def get_tests(config={}): - tests = [] - from Crypto.SelfTest.Cipher import test_AES; tests += test_AES.get_tests(config=config) - from Crypto.SelfTest.Cipher import test_ARC2; tests += test_ARC2.get_tests(config=config) - from Crypto.SelfTest.Cipher import test_ARC4; tests += test_ARC4.get_tests(config=config) - from Crypto.SelfTest.Cipher import test_Blowfish; tests += test_Blowfish.get_tests(config=config) - from Crypto.SelfTest.Cipher import test_CAST; tests += test_CAST.get_tests(config=config) - from Crypto.SelfTest.Cipher import test_DES3; tests += test_DES3.get_tests(config=config) - from Crypto.SelfTest.Cipher import test_DES; tests += test_DES.get_tests(config=config) - from Crypto.SelfTest.Cipher import test_XOR; tests += test_XOR.get_tests(config=config) - from Crypto.SelfTest.Cipher import test_pkcs1_15; tests += test_pkcs1_15.get_tests(config=config) - from Crypto.SelfTest.Cipher import test_pkcs1_oaep; tests += test_pkcs1_oaep.get_tests(config=config) - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/common.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/common.py deleted file mode 100644 index 94183d1..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/common.py +++ /dev/null @@ -1,399 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/common.py: Common code for Crypto.SelfTest.Hash -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-testing for PyCrypto hash modules""" - -__revision__ = "$Id$" - -import sys -import unittest -from binascii import a2b_hex, b2a_hex -from Crypto.Util.py3compat import * - -# For compatibility with Python 2.1 and Python 2.2 -if sys.hexversion < 0x02030000: - # Python 2.1 doesn't have a dict() function - # Python 2.2 dict() function raises TypeError if you do dict(MD5='blah') - def dict(**kwargs): - return kwargs.copy() -else: - dict = dict - -class _NoDefault: pass # sentinel object -def _extract(d, k, default=_NoDefault): - """Get an item from a dictionary, and remove it from the dictionary.""" - try: - retval = d[k] - except KeyError: - if default is _NoDefault: - raise - return default - del d[k] - return retval - -# Generic cipher test case -class CipherSelfTest(unittest.TestCase): - - def __init__(self, module, params): - unittest.TestCase.__init__(self) - self.module = module - - # Extract the parameters - params = params.copy() - self.description = _extract(params, 'description') - self.key = b(_extract(params, 'key')) - self.plaintext = b(_extract(params, 'plaintext')) - self.ciphertext = b(_extract(params, 'ciphertext')) - self.module_name = _extract(params, 'module_name', None) - - mode = _extract(params, 'mode', None) - self.mode_name = str(mode) - if mode is not None: - # Block cipher - self.mode = getattr(self.module, "MODE_" + mode) - self.iv = _extract(params, 'iv', None) - if self.iv is not None: self.iv = b(self.iv) - - # Only relevant for OPENPGP mode - self.encrypted_iv = _extract(params, 'encrypted_iv', None) - if self.encrypted_iv is not None: - self.encrypted_iv = b(self.encrypted_iv) - else: - # Stream cipher - self.mode = None - self.iv = None - - self.extra_params = params - - def shortDescription(self): - return self.description - - def _new(self, do_decryption=0): - params = self.extra_params.copy() - - # Handle CTR mode parameters. 
By default, we use Counter.new(self.module.block_size) - if hasattr(self.module, "MODE_CTR") and self.mode == self.module.MODE_CTR: - from Crypto.Util import Counter - ctr_class = _extract(params, 'ctr_class', Counter.new) - ctr_params = _extract(params, 'ctr_params', {}).copy() - if 'prefix' in ctr_params: ctr_params['prefix'] = a2b_hex(b(ctr_params['prefix'])) - if 'suffix' in ctr_params: ctr_params['suffix'] = a2b_hex(b(ctr_params['suffix'])) - if 'nbits' not in ctr_params: - ctr_params['nbits'] = 8*(self.module.block_size - len(ctr_params.get('prefix', '')) - len(ctr_params.get('suffix', ''))) - params['counter'] = ctr_class(**ctr_params) - - if self.mode is None: - # Stream cipher - return self.module.new(a2b_hex(self.key), **params) - elif self.iv is None: - # Block cipher without iv - return self.module.new(a2b_hex(self.key), self.mode, **params) - else: - # Block cipher with iv - if do_decryption and self.mode == self.module.MODE_OPENPGP: - # In PGP mode, the IV to feed for decryption is the *encrypted* one - return self.module.new(a2b_hex(self.key), self.mode, a2b_hex(self.encrypted_iv), **params) - else: - return self.module.new(a2b_hex(self.key), self.mode, a2b_hex(self.iv), **params) - - def runTest(self): - plaintext = a2b_hex(self.plaintext) - ciphertext = a2b_hex(self.ciphertext) - - ct1 = b2a_hex(self._new().encrypt(plaintext)) - pt1 = b2a_hex(self._new(1).decrypt(ciphertext)) - ct2 = b2a_hex(self._new().encrypt(plaintext)) - pt2 = b2a_hex(self._new(1).decrypt(ciphertext)) - - if hasattr(self.module, "MODE_OPENPGP") and self.mode == self.module.MODE_OPENPGP: - # In PGP mode, data returned by the first encrypt() - # is prefixed with the encrypted IV. - # Here we check it and then remove it from the ciphertexts. - eilen = len(self.encrypted_iv) - self.assertEqual(self.encrypted_iv, ct1[:eilen]) - self.assertEqual(self.encrypted_iv, ct2[:eilen]) - ct1 = ct1[eilen:] - ct2 = ct2[eilen:] - - self.assertEqual(self.ciphertext, ct1) # encrypt - self.assertEqual(self.ciphertext, ct2) # encrypt (second time) - self.assertEqual(self.plaintext, pt1) # decrypt - self.assertEqual(self.plaintext, pt2) # decrypt (second time) - -class CipherStreamingSelfTest(CipherSelfTest): - - def shortDescription(self): - desc = self.module_name - if self.mode is not None: - desc += " in %s mode" % (self.mode_name,) - return "%s should behave like a stream cipher" % (desc,) - - def runTest(self): - plaintext = a2b_hex(self.plaintext) - ciphertext = a2b_hex(self.ciphertext) - - # The cipher should work like a stream cipher - - # Test counter mode encryption, 3 bytes at a time - ct3 = [] - cipher = self._new() - for i in range(0, len(plaintext), 3): - ct3.append(cipher.encrypt(plaintext[i:i+3])) - ct3 = b2a_hex(b("").join(ct3)) - self.assertEqual(self.ciphertext, ct3) # encryption (3 bytes at a time) - - # Test counter mode decryption, 3 bytes at a time - pt3 = [] - cipher = self._new() - for i in range(0, len(ciphertext), 3): - pt3.append(cipher.encrypt(ciphertext[i:i+3])) - # PY3K: This is meant to be text, do not change to bytes (data) - pt3 = b2a_hex(b("").join(pt3)) - self.assertEqual(self.plaintext, pt3) # decryption (3 bytes at a time) - -class CTRSegfaultTest(unittest.TestCase): - - def __init__(self, module, params): - unittest.TestCase.__init__(self) - self.module = module - self.key = b(params['key']) - self.module_name = params.get('module_name', None) - - def shortDescription(self): - return """Regression test: %s.new(key, %s.MODE_CTR) should raise TypeError, not segfault""" % 
(self.module_name, self.module_name) - - def runTest(self): - self.assertRaises(TypeError, self.module.new, a2b_hex(self.key), self.module.MODE_CTR) - -class CTRWraparoundTest(unittest.TestCase): - - def __init__(self, module, params): - unittest.TestCase.__init__(self) - self.module = module - self.key = b(params['key']) - self.module_name = params.get('module_name', None) - - def shortDescription(self): - return """Regression test: %s with MODE_CTR should raise OverflowError on wraparound when shortcut used""" % (self.module_name,) - - def runTest(self): - from Crypto.Util import Counter - - for disable_shortcut in (0, 1): # (False, True) Test CTR-mode shortcut and PyObject_CallObject code paths - for little_endian in (0, 1): # (False, True) Test both endiannesses - ctr = Counter.new(8*self.module.block_size, initial_value=2**(8*self.module.block_size)-1, little_endian=little_endian, disable_shortcut=disable_shortcut) - cipher = self.module.new(a2b_hex(self.key), self.module.MODE_CTR, counter=ctr) - block = b("\x00") * self.module.block_size - cipher.encrypt(block) - self.assertRaises(OverflowError, cipher.encrypt, block) - -class CFBSegmentSizeTest(unittest.TestCase): - - def __init__(self, module, params): - unittest.TestCase.__init__(self) - self.module = module - self.key = b(params['key']) - self.description = params['description'] - - def shortDescription(self): - return self.description - - def runTest(self): - """Regression test: m.new(key, m.MODE_CFB, segment_size=N) should require segment_size to be a multiple of 8 bits""" - for i in range(1, 8): - self.assertRaises(ValueError, self.module.new, a2b_hex(self.key), self.module.MODE_CFB, segment_size=i) - self.module.new(a2b_hex(self.key), self.module.MODE_CFB, "\0"*self.module.block_size, segment_size=8) # should succeed - -class RoundtripTest(unittest.TestCase): - def __init__(self, module, params): - from Crypto import Random - unittest.TestCase.__init__(self) - self.module = module - self.iv = Random.get_random_bytes(module.block_size) - self.key = b(params['key']) - self.plaintext = 100 * b(params['plaintext']) - self.module_name = params.get('module_name', None) - - def shortDescription(self): - return """%s .decrypt() output of .encrypt() should not be garbled""" % (self.module_name,) - - def runTest(self): - for mode in (self.module.MODE_ECB, self.module.MODE_CBC, self.module.MODE_CFB, self.module.MODE_OFB, self.module.MODE_OPENPGP): - encryption_cipher = self.module.new(a2b_hex(self.key), mode, self.iv) - ciphertext = encryption_cipher.encrypt(self.plaintext) - - if mode != self.module.MODE_OPENPGP: - decryption_cipher = self.module.new(a2b_hex(self.key), mode, self.iv) - else: - eiv = ciphertext[:self.module.block_size+2] - ciphertext = ciphertext[self.module.block_size+2:] - decryption_cipher = self.module.new(a2b_hex(self.key), mode, eiv) - decrypted_plaintext = decryption_cipher.decrypt(ciphertext) - self.assertEqual(self.plaintext, decrypted_plaintext) - -class PGPTest(unittest.TestCase): - def __init__(self, module, params): - unittest.TestCase.__init__(self) - self.module = module - self.key = b(params['key']) - - def shortDescription(self): - return "MODE_PGP was implemented incorrectly and insecurely. It's completely banished now." 
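Aside: RoundtripTest above checks the most basic property a cipher wrapper must have: decrypting what encrypt() produced returns the original plaintext across every supported mode. A stripped-down, self-contained version of that check, assuming PyCryptodome for the AES implementation and covering only CBC rather than the full mode matrix, might be:

    import os
    import unittest
    from Crypto.Cipher import AES       # PyCryptodome

    class AESRoundtripTest(unittest.TestCase):
        def test_cbc_roundtrip(self):
            key = os.urandom(16)
            iv = os.urandom(16)
            plaintext = os.urandom(64)  # multiple of the 16-byte block size
            ct = AES.new(key, AES.MODE_CBC, iv).encrypt(plaintext)
            pt = AES.new(key, AES.MODE_CBC, iv).decrypt(ct)
            self.assertEqual(plaintext, pt)

    if __name__ == "__main__":
        unittest.main()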
- - def runTest(self): - self.assertRaises(ValueError, self.module.new, a2b_hex(self.key), - self.module.MODE_PGP) - -class IVLengthTest(unittest.TestCase): - def __init__(self, module, params): - unittest.TestCase.__init__(self) - self.module = module - self.key = b(params['key']) - - def shortDescription(self): - return "Check that all modes except MODE_ECB and MODE_CTR require an IV of the proper length" - - def runTest(self): - self.assertRaises(ValueError, self.module.new, a2b_hex(self.key), - self.module.MODE_CBC, "") - self.assertRaises(ValueError, self.module.new, a2b_hex(self.key), - self.module.MODE_CFB, "") - self.assertRaises(ValueError, self.module.new, a2b_hex(self.key), - self.module.MODE_OFB, "") - self.assertRaises(ValueError, self.module.new, a2b_hex(self.key), - self.module.MODE_OPENPGP, "") - self.module.new(a2b_hex(self.key), self.module.MODE_ECB, "") - self.module.new(a2b_hex(self.key), self.module.MODE_CTR, "", counter=self._dummy_counter) - - def _dummy_counter(self): - return "\0" * self.module.block_size - -def make_block_tests(module, module_name, test_data): - tests = [] - extra_tests_added = 0 - for i in range(len(test_data)): - row = test_data[i] - - # Build the "params" dictionary - params = {'mode': 'ECB'} - if len(row) == 3: - (params['plaintext'], params['ciphertext'], params['key']) = row - elif len(row) == 4: - (params['plaintext'], params['ciphertext'], params['key'], params['description']) = row - elif len(row) == 5: - (params['plaintext'], params['ciphertext'], params['key'], params['description'], extra_params) = row - params.update(extra_params) - else: - raise AssertionError("Unsupported tuple size %d" % (len(row),)) - - # Build the display-name for the test - p2 = params.copy() - p_key = _extract(p2, 'key') - p_plaintext = _extract(p2, 'plaintext') - p_ciphertext = _extract(p2, 'ciphertext') - p_description = _extract(p2, 'description', None) - p_mode = p2.get('mode', 'ECB') - if p_mode == 'ECB': - _extract(p2, 'mode', 'ECB') - - if p_description is not None: - description = p_description - elif p_mode == 'ECB' and not p2: - description = "p=%s, k=%s" % (p_plaintext, p_key) - else: - description = "p=%s, k=%s, %r" % (p_plaintext, p_key, p2) - name = "%s #%d: %s" % (module_name, i+1, description) - params['description'] = name - params['module_name'] = module_name - - # Add extra test(s) to the test suite before the current test - if not extra_tests_added: - tests += [ - CTRSegfaultTest(module, params), - CTRWraparoundTest(module, params), - CFBSegmentSizeTest(module, params), - RoundtripTest(module, params), - PGPTest(module, params), - IVLengthTest(module, params), - ] - extra_tests_added = 1 - - # Add the current test to the test suite - tests.append(CipherSelfTest(module, params)) - - # When using CTR mode, test that the interface behaves like a stream cipher - if p_mode == 'CTR': - tests.append(CipherStreamingSelfTest(module, params)) - - # When using CTR mode, test the non-shortcut code path. 
- if p_mode == 'CTR' and 'ctr_class' not in params: - params2 = params.copy() - params2['description'] += " (shortcut disabled)" - ctr_params2 = params.get('ctr_params', {}).copy() - params2['ctr_params'] = ctr_params2 - if 'disable_shortcut' not in params2['ctr_params']: - params2['ctr_params']['disable_shortcut'] = 1 - tests.append(CipherSelfTest(module, params2)) - return tests - -def make_stream_tests(module, module_name, test_data): - tests = [] - for i in range(len(test_data)): - row = test_data[i] - - # Build the "params" dictionary - params = {} - if len(row) == 3: - (params['plaintext'], params['ciphertext'], params['key']) = row - elif len(row) == 4: - (params['plaintext'], params['ciphertext'], params['key'], params['description']) = row - elif len(row) == 5: - (params['plaintext'], params['ciphertext'], params['key'], params['description'], extra_params) = row - params.update(extra_params) - else: - raise AssertionError("Unsupported tuple size %d" % (len(row),)) - - # Build the display-name for the test - p2 = params.copy() - p_key = _extract(p2, 'key') - p_plaintext = _extract(p2, 'plaintext') - p_ciphertext = _extract(p2, 'ciphertext') - p_description = _extract(p2, 'description', None) - - if p_description is not None: - description = p_description - elif not p2: - description = "p=%s, k=%s" % (p_plaintext, p_key) - else: - description = "p=%s, k=%s, %r" % (p_plaintext, p_key, p2) - name = "%s #%d: %s" % (module_name, i+1, description) - params['description'] = name - params['module_name'] = module_name - - # Add the test to the test suite - tests.append(CipherSelfTest(module, params)) - tests.append(CipherStreamingSelfTest(module, params)) - return tests - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_AES.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_AES.py deleted file mode 100644 index 63c56d0..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_AES.py +++ /dev/null @@ -1,1433 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/AES.py: Self-test for the AES cipher -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Cipher.AES""" - -__revision__ = "$Id$" - -from .common import dict # For compatibility with Python 2.1 and 2.2 -from Crypto.Util.py3compat import * -from binascii import hexlify - -# This is a list of (plaintext, ciphertext, key[, description[, params]]) tuples. 
-test_data = [ - # FIPS PUB 197 test vectors - # http://csrc.nist.gov/publications/fips/fips197/fips-197.pdf - - ('00112233445566778899aabbccddeeff', '69c4e0d86a7b0430d8cdb78070b4c55a', - '000102030405060708090a0b0c0d0e0f', 'FIPS 197 C.1 (AES-128)'), - - ('00112233445566778899aabbccddeeff', 'dda97ca4864cdfe06eaf70a0ec0d7191', - '000102030405060708090a0b0c0d0e0f1011121314151617', - 'FIPS 197 C.2 (AES-192)'), - - ('00112233445566778899aabbccddeeff', '8ea2b7ca516745bfeafc49904b496089', - '000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', - 'FIPS 197 C.3 (AES-256)'), - - # Rijndael128 test vectors - # Downloaded 2008-09-13 from - # http://www.iaik.tugraz.at/Research/krypto/AES/old/~rijmen/rijndael/testvalues.tar.gz - - # ecb_tbl.txt, KEYSIZE=128 - ('506812a45f08c889b97f5980038b8359', 'd8f532538289ef7d06b506a4fd5be9c9', - '00010203050607080a0b0c0d0f101112', - 'ecb-tbl-128: I=1'), - ('5c6d71ca30de8b8b00549984d2ec7d4b', '59ab30f4d4ee6e4ff9907ef65b1fb68c', - '14151617191a1b1c1e1f202123242526', - 'ecb-tbl-128: I=2'), - ('53f3f4c64f8616e4e7c56199f48f21f6', 'bf1ed2fcb2af3fd41443b56d85025cb1', - '28292a2b2d2e2f30323334353738393a', - 'ecb-tbl-128: I=3'), - ('a1eb65a3487165fb0f1c27ff9959f703', '7316632d5c32233edcb0780560eae8b2', - '3c3d3e3f41424344464748494b4c4d4e', - 'ecb-tbl-128: I=4'), - ('3553ecf0b1739558b08e350a98a39bfa', '408c073e3e2538072b72625e68b8364b', - '50515253555657585a5b5c5d5f606162', - 'ecb-tbl-128: I=5'), - ('67429969490b9711ae2b01dc497afde8', 'e1f94dfa776597beaca262f2f6366fea', - '64656667696a6b6c6e6f707173747576', - 'ecb-tbl-128: I=6'), - ('93385c1f2aec8bed192f5a8e161dd508', 'f29e986c6a1c27d7b29ffd7ee92b75f1', - '78797a7b7d7e7f80828384858788898a', - 'ecb-tbl-128: I=7'), - ('b5bf946be19beb8db3983b5f4c6e8ddb', '131c886a57f8c2e713aba6955e2b55b5', - '8c8d8e8f91929394969798999b9c9d9e', - 'ecb-tbl-128: I=8'), - ('41321ee10e21bd907227c4450ff42324', 'd2ab7662df9b8c740210e5eeb61c199d', - 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2', - 'ecb-tbl-128: I=9'), - ('00a82f59c91c8486d12c0a80124f6089', '14c10554b2859c484cab5869bbe7c470', - 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', - 'ecb-tbl-128: I=10'), - ('7ce0fd076754691b4bbd9faf8a1372fe', 'db4d498f0a49cf55445d502c1f9ab3b5', - 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9da', - 'ecb-tbl-128: I=11'), - ('23605a8243d07764541bc5ad355b3129', '6d96fef7d66590a77a77bb2056667f7f', - 'dcdddedfe1e2e3e4e6e7e8e9ebecedee', - 'ecb-tbl-128: I=12'), - ('12a8cfa23ea764fd876232b4e842bc44', '316fb68edba736c53e78477bf913725c', - 'f0f1f2f3f5f6f7f8fafbfcfdfe010002', - 'ecb-tbl-128: I=13'), - ('bcaf32415e8308b3723e5fdd853ccc80', '6936f2b93af8397fd3a771fc011c8c37', - '04050607090a0b0c0e0f101113141516', - 'ecb-tbl-128: I=14'), - ('89afae685d801ad747ace91fc49adde0', 'f3f92f7a9c59179c1fcc2c2ba0b082cd', - '2c2d2e2f31323334363738393b3c3d3e', - 'ecb-tbl-128: I=15'), - ('f521d07b484357c4a69e76124a634216', '6a95ea659ee3889158e7a9152ff04ebc', - '40414243454647484a4b4c4d4f505152', - 'ecb-tbl-128: I=16'), - ('3e23b3bc065bcc152407e23896d77783', '1959338344e945670678a5d432c90b93', - '54555657595a5b5c5e5f606163646566', - 'ecb-tbl-128: I=17'), - ('79f0fba002be1744670e7e99290d8f52', 'e49bddd2369b83ee66e6c75a1161b394', - '68696a6b6d6e6f70727374757778797a', - 'ecb-tbl-128: I=18'), - ('da23fe9d5bd63e1d72e3dafbe21a6c2a', 'd3388f19057ff704b70784164a74867d', - '7c7d7e7f81828384868788898b8c8d8e', - 'ecb-tbl-128: I=19'), - ('e3f5698ba90b6a022efd7db2c7e6c823', '23aa03e2d5e4cd24f3217e596480d1e1', - 'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', - 'ecb-tbl-128: I=20'), - ('bdc2691d4f1b73d2700679c3bcbf9c6e', 
'c84113d68b666ab2a50a8bdb222e91b9', - 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2', - 'ecb-tbl-128: I=21'), - ('ba74e02093217ee1ba1b42bd5624349a', 'ac02403981cd4340b507963db65cb7b6', - '08090a0b0d0e0f10121314151718191a', - 'ecb-tbl-128: I=22'), - ('b5c593b5851c57fbf8b3f57715e8f680', '8d1299236223359474011f6bf5088414', - '6c6d6e6f71727374767778797b7c7d7e', - 'ecb-tbl-128: I=23'), - ('3da9bd9cec072381788f9387c3bbf4ee', '5a1d6ab8605505f7977e55b9a54d9b90', - '80818283858687888a8b8c8d8f909192', - 'ecb-tbl-128: I=24'), - ('4197f3051121702ab65d316b3c637374', '72e9c2d519cf555e4208805aabe3b258', - '94959697999a9b9c9e9fa0a1a3a4a5a6', - 'ecb-tbl-128: I=25'), - ('9f46c62ec4f6ee3f6e8c62554bc48ab7', 'a8f3e81c4a23a39ef4d745dffe026e80', - 'a8a9aaabadaeafb0b2b3b4b5b7b8b9ba', - 'ecb-tbl-128: I=26'), - ('0220673fe9e699a4ebc8e0dbeb6979c8', '546f646449d31458f9eb4ef5483aee6c', - 'bcbdbebfc1c2c3c4c6c7c8c9cbcccdce', - 'ecb-tbl-128: I=27'), - ('b2b99171337ded9bc8c2c23ff6f18867', '4dbe4bc84ac797c0ee4efb7f1a07401c', - 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2', - 'ecb-tbl-128: I=28'), - ('a7facf4e301e984e5efeefd645b23505', '25e10bfb411bbd4d625ac8795c8ca3b3', - 'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', - 'ecb-tbl-128: I=29'), - ('f7c762e4a9819160fd7acfb6c4eedcdd', '315637405054ec803614e43def177579', - 'f8f9fafbfdfefe00020304050708090a', - 'ecb-tbl-128: I=30'), - ('9b64fc21ea08709f4915436faa70f1be', '60c5bc8a1410247295c6386c59e572a8', - '0c0d0e0f11121314161718191b1c1d1e', - 'ecb-tbl-128: I=31'), - ('52af2c3de07ee6777f55a4abfc100b3f', '01366fc8ca52dfe055d6a00a76471ba6', - '20212223252627282a2b2c2d2f303132', - 'ecb-tbl-128: I=32'), - ('2fca001224386c57aa3f968cbe2c816f', 'ecc46595516ec612449c3f581e7d42ff', - '34353637393a3b3c3e3f404143444546', - 'ecb-tbl-128: I=33'), - ('4149c73658a4a9c564342755ee2c132f', '6b7ffe4c602a154b06ee9c7dab5331c9', - '48494a4b4d4e4f50525354555758595a', - 'ecb-tbl-128: I=34'), - ('af60005a00a1772f7c07a48a923c23d2', '7da234c14039a240dd02dd0fbf84eb67', - '5c5d5e5f61626364666768696b6c6d6e', - 'ecb-tbl-128: I=35'), - ('6fccbc28363759914b6f0280afaf20c6', 'c7dc217d9e3604ffe7e91f080ecd5a3a', - '70717273757677787a7b7c7d7f808182', - 'ecb-tbl-128: I=36'), - ('7d82a43ddf4fefa2fc5947499884d386', '37785901863f5c81260ea41e7580cda5', - '84858687898a8b8c8e8f909193949596', - 'ecb-tbl-128: I=37'), - ('5d5a990eaab9093afe4ce254dfa49ef9', 'a07b9338e92ed105e6ad720fccce9fe4', - '98999a9b9d9e9fa0a2a3a4a5a7a8a9aa', - 'ecb-tbl-128: I=38'), - ('4cd1e2fd3f4434b553aae453f0ed1a02', 'ae0fb9722418cc21a7da816bbc61322c', - 'acadaeafb1b2b3b4b6b7b8b9bbbcbdbe', - 'ecb-tbl-128: I=39'), - ('5a2c9a9641d4299125fa1b9363104b5e', 'c826a193080ff91ffb21f71d3373c877', - 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2', - 'ecb-tbl-128: I=40'), - ('b517fe34c0fa217d341740bfd4fe8dd4', '1181b11b0e494e8d8b0aa6b1d5ac2c48', - 'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', - 'ecb-tbl-128: I=41'), - ('014baf2278a69d331d5180103643e99a', '6743c3d1519ab4f2cd9a78ab09a511bd', - 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fa', - 'ecb-tbl-128: I=42'), - ('b529bd8164f20d0aa443d4932116841c', 'dc55c076d52bacdf2eefd952946a439d', - 'fcfdfeff01020304060708090b0c0d0e', - 'ecb-tbl-128: I=43'), - ('2e596dcbb2f33d4216a1176d5bd1e456', '711b17b590ffc72b5c8e342b601e8003', - '10111213151617181a1b1c1d1f202122', - 'ecb-tbl-128: I=44'), - ('7274a1ea2b7ee2424e9a0e4673689143', '19983bb0950783a537e1339f4aa21c75', - '24252627292a2b2c2e2f303133343536', - 'ecb-tbl-128: I=45'), - ('ae20020bd4f13e9d90140bee3b5d26af', '3ba7762e15554169c0f4fa39164c410c', - '38393a3b3d3e3f40424344454748494a', - 'ecb-tbl-128: I=46'), - 
('baac065da7ac26e855e79c8849d75a02', 'a0564c41245afca7af8aa2e0e588ea89', - '4c4d4e4f51525354565758595b5c5d5e', - 'ecb-tbl-128: I=47'), - ('7c917d8d1d45fab9e2540e28832540cc', '5e36a42a2e099f54ae85ecd92e2381ed', - '60616263656667686a6b6c6d6f707172', - 'ecb-tbl-128: I=48'), - ('bde6f89e16daadb0e847a2a614566a91', '770036f878cd0f6ca2268172f106f2fe', - '74757677797a7b7c7e7f808183848586', - 'ecb-tbl-128: I=49'), - ('c9de163725f1f5be44ebb1db51d07fbc', '7e4e03908b716116443ccf7c94e7c259', - '88898a8b8d8e8f90929394959798999a', - 'ecb-tbl-128: I=50'), - ('3af57a58f0c07dffa669572b521e2b92', '482735a48c30613a242dd494c7f9185d', - '9c9d9e9fa1a2a3a4a6a7a8a9abacadae', - 'ecb-tbl-128: I=51'), - ('3d5ebac306dde4604f1b4fbbbfcdae55', 'b4c0f6c9d4d7079addf9369fc081061d', - 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2', - 'ecb-tbl-128: I=52'), - ('c2dfa91bceb76a1183c995020ac0b556', 'd5810fe0509ac53edcd74f89962e6270', - 'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', - 'ecb-tbl-128: I=53'), - ('c70f54305885e9a0746d01ec56c8596b', '03f17a16b3f91848269ecdd38ebb2165', - 'd8d9dadbdddedfe0e2e3e4e5e7e8e9ea', - 'ecb-tbl-128: I=54'), - ('c4f81b610e98012ce000182050c0c2b2', 'da1248c3180348bad4a93b4d9856c9df', - 'ecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', - 'ecb-tbl-128: I=55'), - ('eaab86b1d02a95d7404eff67489f97d4', '3d10d7b63f3452c06cdf6cce18be0c2c', - '00010203050607080a0b0c0d0f101112', - 'ecb-tbl-128: I=56'), - ('7c55bdb40b88870b52bec3738de82886', '4ab823e7477dfddc0e6789018fcb6258', - '14151617191a1b1c1e1f202123242526', - 'ecb-tbl-128: I=57'), - ('ba6eaa88371ff0a3bd875e3f2a975ce0', 'e6478ba56a77e70cfdaa5c843abde30e', - '28292a2b2d2e2f30323334353738393a', - 'ecb-tbl-128: I=58'), - ('08059130c4c24bd30cf0575e4e0373dc', '1673064895fbeaf7f09c5429ff75772d', - '3c3d3e3f41424344464748494b4c4d4e', - 'ecb-tbl-128: I=59'), - ('9a8eab004ef53093dfcf96f57e7eda82', '4488033ae9f2efd0ca9383bfca1a94e9', - '50515253555657585a5b5c5d5f606162', - 'ecb-tbl-128: I=60'), - ('0745b589e2400c25f117b1d796c28129', '978f3b8c8f9d6f46626cac3c0bcb9217', - '64656667696a6b6c6e6f707173747576', - 'ecb-tbl-128: I=61'), - ('2f1777781216cec3f044f134b1b92bbe', 'e08c8a7e582e15e5527f1d9e2eecb236', - '78797a7b7d7e7f80828384858788898a', - 'ecb-tbl-128: I=62'), - ('353a779ffc541b3a3805d90ce17580fc', 'cec155b76ac5ffda4cf4f9ca91e49a7a', - '8c8d8e8f91929394969798999b9c9d9e', - 'ecb-tbl-128: I=63'), - ('1a1eae4415cefcf08c4ac1c8f68bea8f', 'd5ac7165763225dd2a38cdc6862c29ad', - 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2', - 'ecb-tbl-128: I=64'), - ('e6e7e4e5b0b3b2b5d4d5aaab16111013', '03680fe19f7ce7275452020be70e8204', - 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', - 'ecb-tbl-128: I=65'), - ('f8f9fafbfbf8f9e677767170efe0e1e2', '461df740c9781c388e94bb861ceb54f6', - 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9da', - 'ecb-tbl-128: I=66'), - ('63626160a1a2a3a445444b4a75727370', '451bd60367f96483042742219786a074', - 'dcdddedfe1e2e3e4e6e7e8e9ebecedee', - 'ecb-tbl-128: I=67'), - ('717073720605040b2d2c2b2a05fafbf9', 'e4dfa42671a02e57ef173b85c0ea9f2b', - 'f0f1f2f3f5f6f7f8fafbfcfdfe010002', - 'ecb-tbl-128: I=68'), - ('78797a7beae9e8ef3736292891969794', 'ed11b89e76274282227d854700a78b9e', - '04050607090a0b0c0e0f101113141516', - 'ecb-tbl-128: I=69'), - ('838281803231300fdddcdbdaa0afaead', '433946eaa51ea47af33895f2b90b3b75', - '18191a1b1d1e1f20222324252728292a', - 'ecb-tbl-128: I=70'), - ('18191a1bbfbcbdba75747b7a7f78797a', '6bc6d616a5d7d0284a5910ab35022528', - '2c2d2e2f31323334363738393b3c3d3e', - 'ecb-tbl-128: I=71'), - ('848586879b989996a3a2a5a4849b9a99', 'd2a920ecfe919d354b5f49eae9719c98', - '40414243454647484a4b4c4d4f505152', - 'ecb-tbl-128: 
I=72'), - ('0001020322212027cacbf4f551565754', '3a061b17f6a92885efbd0676985b373d', - '54555657595a5b5c5e5f606163646566', - 'ecb-tbl-128: I=73'), - ('cecfcccdafacadb2515057564a454447', 'fadeec16e33ea2f4688499d157e20d8f', - '68696a6b6d6e6f70727374757778797a', - 'ecb-tbl-128: I=74'), - ('92939091cdcecfc813121d1c80878685', '5cdefede59601aa3c3cda36fa6b1fa13', - '7c7d7e7f81828384868788898b8c8d8e', - 'ecb-tbl-128: I=75'), - ('d2d3d0d16f6c6d6259585f5ed1eeefec', '9574b00039844d92ebba7ee8719265f8', - '90919293959697989a9b9c9d9fa0a1a2', - 'ecb-tbl-128: I=76'), - ('acadaeaf878485820f0e1110d5d2d3d0', '9a9cf33758671787e5006928188643fa', - 'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', - 'ecb-tbl-128: I=77'), - ('9091929364676619e6e7e0e1757a7b78', '2cddd634c846ba66bb46cbfea4a674f9', - 'b8b9babbbdbebfc0c2c3c4c5c7c8c9ca', - 'ecb-tbl-128: I=78'), - ('babbb8b98a89888f74757a7b92959497', 'd28bae029393c3e7e26e9fafbbb4b98f', - 'cccdcecfd1d2d3d4d6d7d8d9dbdcddde', - 'ecb-tbl-128: I=79'), - ('8d8c8f8e6e6d6c633b3a3d3ccad5d4d7', 'ec27529b1bee0a9ab6a0d73ebc82e9b7', - 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2', - 'ecb-tbl-128: I=80'), - ('86878485010203040808f7f767606162', '3cb25c09472aff6ee7e2b47ccd7ccb17', - 'f4f5f6f7f9fafbfcfefe010103040506', - 'ecb-tbl-128: I=81'), - ('8e8f8c8d656667788a8b8c8d010e0f0c', 'dee33103a7283370d725e44ca38f8fe5', - '08090a0b0d0e0f10121314151718191a', - 'ecb-tbl-128: I=82'), - ('c8c9cacb858687807a7b7475e7e0e1e2', '27f9bcd1aac64bffc11e7815702c1a69', - '1c1d1e1f21222324262728292b2c2d2e', - 'ecb-tbl-128: I=83'), - ('6d6c6f6e5053525d8c8d8a8badd2d3d0', '5df534ffad4ed0749a9988e9849d0021', - '30313233353637383a3b3c3d3f404142', - 'ecb-tbl-128: I=84'), - ('28292a2b393a3b3c0607181903040506', 'a48bee75db04fb60ca2b80f752a8421b', - '44454647494a4b4c4e4f505153545556', - 'ecb-tbl-128: I=85'), - ('a5a4a7a6b0b3b28ddbdadddcbdb2b3b0', '024c8cf70bc86ee5ce03678cb7af45f9', - '58595a5b5d5e5f60626364656768696a', - 'ecb-tbl-128: I=86'), - ('323330316467666130313e3f2c2b2a29', '3c19ac0f8a3a3862ce577831301e166b', - '6c6d6e6f71727374767778797b7c7d7e', - 'ecb-tbl-128: I=87'), - ('27262524080b0a05171611100b141516', 'c5e355b796a57421d59ca6be82e73bca', - '80818283858687888a8b8c8d8f909192', - 'ecb-tbl-128: I=88'), - ('040506074142434435340b0aa3a4a5a6', 'd94033276417abfb05a69d15b6e386e2', - '94959697999a9b9c9e9fa0a1a3a4a5a6', - 'ecb-tbl-128: I=89'), - ('242526271112130c61606766bdb2b3b0', '24b36559ea3a9b9b958fe6da3e5b8d85', - 'a8a9aaabadaeafb0b2b3b4b5b7b8b9ba', - 'ecb-tbl-128: I=90'), - ('4b4a4948252627209e9f9091cec9c8cb', '20fd4feaa0e8bf0cce7861d74ef4cb72', - 'bcbdbebfc1c2c3c4c6c7c8c9cbcccdce', - 'ecb-tbl-128: I=91'), - ('68696a6b6665646b9f9e9998d9e6e7e4', '350e20d5174277b9ec314c501570a11d', - 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2', - 'ecb-tbl-128: I=92'), - ('34353637c5c6c7c0f0f1eeef7c7b7a79', '87a29d61b7c604d238fe73045a7efd57', - 'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', - 'ecb-tbl-128: I=93'), - ('32333031c2c1c13f0d0c0b0a050a0b08', '2c3164c1cc7d0064816bdc0faa362c52', - 'f8f9fafbfdfefe00020304050708090a', - 'ecb-tbl-128: I=94'), - ('cdcccfcebebdbcbbabaaa5a4181f1e1d', '195fe5e8a05a2ed594f6e4400eee10b3', - '0c0d0e0f11121314161718191b1c1d1e', - 'ecb-tbl-128: I=95'), - ('212023223635343ba0a1a6a7445b5a59', 'e4663df19b9a21a5a284c2bd7f905025', - '20212223252627282a2b2c2d2f303132', - 'ecb-tbl-128: I=96'), - ('0e0f0c0da8abaaad2f2e515002050407', '21b88714cfb4e2a933bd281a2c4743fd', - '34353637393a3b3c3e3f404143444546', - 'ecb-tbl-128: I=97'), - ('070605042a2928378e8f8889bdb2b3b0', 'cbfc3980d704fd0fc54378ab84e17870', - '48494a4b4d4e4f50525354555758595a', - 
'ecb-tbl-128: I=98'), - ('cbcac9c893909196a9a8a7a6a5a2a3a0', 'bc5144baa48bdeb8b63e22e03da418ef', - '5c5d5e5f61626364666768696b6c6d6e', - 'ecb-tbl-128: I=99'), - ('80818283c1c2c3cc9c9d9a9b0cf3f2f1', '5a1dbaef1ee2984b8395da3bdffa3ccc', - '70717273757677787a7b7c7d7f808182', - 'ecb-tbl-128: I=100'), - ('1213101125262720fafbe4e5b1b6b7b4', 'f0b11cd0729dfcc80cec903d97159574', - '84858687898a8b8c8e8f909193949596', - 'ecb-tbl-128: I=101'), - ('7f7e7d7c3033320d97969190222d2c2f', '9f95314acfddc6d1914b7f19a9cc8209', - '98999a9b9d9e9fa0a2a3a4a5a7a8a9aa', - 'ecb-tbl-128: I=102'), - ('4e4f4c4d484b4a4d81808f8e53545556', '595736f6f0f70914a94e9e007f022519', - 'acadaeafb1b2b3b4b6b7b8b9bbbcbdbe', - 'ecb-tbl-128: I=103'), - ('dcdddedfb0b3b2bd15141312a1bebfbc', '1f19f57892cae586fcdfb4c694deb183', - 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2', - 'ecb-tbl-128: I=104'), - ('93929190282b2a2dc4c5fafb92959497', '540700ee1f6f3dab0b3eddf6caee1ef5', - 'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', - 'ecb-tbl-128: I=105'), - ('f5f4f7f6c4c7c6d9373631307e717073', '14a342a91019a331687a2254e6626ca2', - 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fa', - 'ecb-tbl-128: I=106'), - ('93929190b6b5b4b364656a6b05020300', '7b25f3c3b2eea18d743ef283140f29ff', - 'fcfdfeff01020304060708090b0c0d0e', - 'ecb-tbl-128: I=107'), - ('babbb8b90d0e0f00a4a5a2a3043b3a39', '46c2587d66e5e6fa7f7ca6411ad28047', - '10111213151617181a1b1c1d1f202122', - 'ecb-tbl-128: I=108'), - ('d8d9dadb7f7c7d7a10110e0f787f7e7d', '09470e72229d954ed5ee73886dfeeba9', - '24252627292a2b2c2e2f303133343536', - 'ecb-tbl-128: I=109'), - ('fefffcfdefeced923b3a3d3c6768696a', 'd77c03de92d4d0d79ef8d4824ef365eb', - '38393a3b3d3e3f40424344454748494a', - 'ecb-tbl-128: I=110'), - ('d6d7d4d58a89888f96979899a5a2a3a0', '1d190219f290e0f1715d152d41a23593', - '4c4d4e4f51525354565758595b5c5d5e', - 'ecb-tbl-128: I=111'), - ('18191a1ba8abaaa5303136379b848586', 'a2cd332ce3a0818769616292e87f757b', - '60616263656667686a6b6c6d6f707172', - 'ecb-tbl-128: I=112'), - ('6b6a6968a4a7a6a1d6d72829b0b7b6b5', 'd54afa6ce60fbf9341a3690e21385102', - '74757677797a7b7c7e7f808183848586', - 'ecb-tbl-128: I=113'), - ('000102038a89889755545352a6a9a8ab', '06e5c364ded628a3f5e05e613e356f46', - '88898a8b8d8e8f90929394959798999a', - 'ecb-tbl-128: I=114'), - ('2d2c2f2eb3b0b1b6b6b7b8b9f2f5f4f7', 'eae63c0e62556dac85d221099896355a', - '9c9d9e9fa1a2a3a4a6a7a8a9abacadae', - 'ecb-tbl-128: I=115'), - ('979695943536373856575051e09f9e9d', '1fed060e2c6fc93ee764403a889985a2', - 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2', - 'ecb-tbl-128: I=116'), - ('a4a5a6a7989b9a9db1b0afae7a7d7c7f', 'c25235c1a30fdec1c7cb5c5737b2a588', - 'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', - 'ecb-tbl-128: I=117'), - ('c1c0c3c2686b6a55a8a9aeafeae5e4e7', '796dbef95147d4d30873ad8b7b92efc0', - 'd8d9dadbdddedfe0e2e3e4e5e7e8e9ea', - 'ecb-tbl-128: I=118'), - ('c1c0c3c2141716118c8d828364636261', 'cbcf0fb34d98d0bd5c22ce37211a46bf', - 'ecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', - 'ecb-tbl-128: I=119'), - ('93929190cccfcec196979091e0fffefd', '94b44da6466126cafa7c7fd09063fc24', - '00010203050607080a0b0c0d0f101112', - 'ecb-tbl-128: I=120'), - ('b4b5b6b7f9fafbfc25241b1a6e69686b', 'd78c5b5ebf9b4dbda6ae506c5074c8fe', - '14151617191a1b1c1e1f202123242526', - 'ecb-tbl-128: I=121'), - ('868784850704051ac7c6c1c08788898a', '6c27444c27204b043812cf8cf95f9769', - '28292a2b2d2e2f30323334353738393a', - 'ecb-tbl-128: I=122'), - ('f4f5f6f7aaa9a8affdfcf3f277707172', 'be94524ee5a2aa50bba8b75f4c0aebcf', - '3c3d3e3f41424344464748494b4c4d4e', - 'ecb-tbl-128: I=123'), - ('d3d2d1d00605040bc3c2c5c43e010003', 'a0aeaae91ba9f31f51aeb3588cf3a39e', - 
'50515253555657585a5b5c5d5f606162', - 'ecb-tbl-128: I=124'), - ('73727170424140476a6b74750d0a0b08', '275297779c28266ef9fe4c6a13c08488', - '64656667696a6b6c6e6f707173747576', - 'ecb-tbl-128: I=125'), - ('c2c3c0c10a0908f754555253a1aeafac', '86523d92bb8672cb01cf4a77fd725882', - '78797a7b7d7e7f80828384858788898a', - 'ecb-tbl-128: I=126'), - ('6d6c6f6ef8fbfafd82838c8df8fffefd', '4b8327640e9f33322a04dd96fcbf9a36', - '8c8d8e8f91929394969798999b9c9d9e', - 'ecb-tbl-128: I=127'), - ('f5f4f7f684878689a6a7a0a1d2cdcccf', 'ce52af650d088ca559425223f4d32694', - 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2', - 'ecb-tbl-128: I=128'), - - # ecb_tbl.txt, KEYSIZE=192 - ('2d33eef2c0430a8a9ebf45e809c40bb6', 'dff4945e0336df4c1c56bc700eff837f', - '00010203050607080a0b0c0d0f10111214151617191a1b1c', - 'ecb-tbl-192: I=1'), - ('6aa375d1fa155a61fb72353e0a5a8756', 'b6fddef4752765e347d5d2dc196d1252', - '1e1f20212324252628292a2b2d2e2f30323334353738393a', - 'ecb-tbl-192: I=2'), - ('bc3736518b9490dcb8ed60eb26758ed4', 'd23684e3d963b3afcf1a114aca90cbd6', - '3c3d3e3f41424344464748494b4c4d4e5051525355565758', - 'ecb-tbl-192: I=3'), - ('aa214402b46cffb9f761ec11263a311e', '3a7ac027753e2a18c2ceab9e17c11fd0', - '5a5b5c5d5f60616264656667696a6b6c6e6f707173747576', - 'ecb-tbl-192: I=4'), - ('02aea86e572eeab66b2c3af5e9a46fd6', '8f6786bd007528ba26603c1601cdd0d8', - '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394', - 'ecb-tbl-192: I=5'), - ('e2aef6acc33b965c4fa1f91c75ff6f36', 'd17d073b01e71502e28b47ab551168b3', - '969798999b9c9d9ea0a1a2a3a5a6a7a8aaabacadafb0b1b2', - 'ecb-tbl-192: I=6'), - ('0659df46427162b9434865dd9499f91d', 'a469da517119fab95876f41d06d40ffa', - 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6c8c9cacbcdcecfd0', - 'ecb-tbl-192: I=7'), - ('49a44239c748feb456f59c276a5658df', '6091aa3b695c11f5c0b6ad26d3d862ff', - 'd2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', - 'ecb-tbl-192: I=8'), - ('66208f6e9d04525bdedb2733b6a6be37', '70f9e67f9f8df1294131662dc6e69364', - 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c', - 'ecb-tbl-192: I=9'), - ('3393f8dfc729c97f5480b950bc9666b0', 'd154dcafad8b207fa5cbc95e9996b559', - '0e0f10111314151618191a1b1d1e1f20222324252728292a', - 'ecb-tbl-192: I=10'), - ('606834c8ce063f3234cf1145325dbd71', '4934d541e8b46fa339c805a7aeb9e5da', - '2c2d2e2f31323334363738393b3c3d3e4041424345464748', - 'ecb-tbl-192: I=11'), - ('fec1c04f529bbd17d8cecfcc4718b17f', '62564c738f3efe186e1a127a0c4d3c61', - '4a4b4c4d4f50515254555657595a5b5c5e5f606163646566', - 'ecb-tbl-192: I=12'), - ('32df99b431ed5dc5acf8caf6dc6ce475', '07805aa043986eb23693e23bef8f3438', - '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384', - 'ecb-tbl-192: I=13'), - ('7fdc2b746f3f665296943b83710d1f82', 'df0b4931038bade848dee3b4b85aa44b', - '868788898b8c8d8e90919293959697989a9b9c9d9fa0a1a2', - 'ecb-tbl-192: I=14'), - ('8fba1510a3c5b87e2eaa3f7a91455ca2', '592d5fded76582e4143c65099309477c', - 'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6b8b9babbbdbebfc0', - 'ecb-tbl-192: I=15'), - ('2c9b468b1c2eed92578d41b0716b223b', 'c9b8d6545580d3dfbcdd09b954ed4e92', - 'c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', - 'ecb-tbl-192: I=16'), - ('0a2bbf0efc6bc0034f8a03433fca1b1a', '5dccd5d6eb7c1b42acb008201df707a0', - 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfc', - 'ecb-tbl-192: I=17'), - ('25260e1f31f4104d387222e70632504b', 'a2a91682ffeb6ed1d34340946829e6f9', - 'fefe01010304050608090a0b0d0e0f10121314151718191a', - 'ecb-tbl-192: I=18'), - ('c527d25a49f08a5228d338642ae65137', 'e45d185b797000348d9267960a68435d', - '1c1d1e1f21222324262728292b2c2d2e3031323335363738', - 'ecb-tbl-192: I=19'), - 
('3b49fc081432f5890d0e3d87e884a69e', '45e060dae5901cda8089e10d4f4c246b', - '3a3b3c3d3f40414244454647494a4b4c4e4f505153545556', - 'ecb-tbl-192: I=20'), - ('d173f9ed1e57597e166931df2754a083', 'f6951afacc0079a369c71fdcff45df50', - '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374', - 'ecb-tbl-192: I=21'), - ('8c2b7cafa5afe7f13562daeae1adede0', '9e95e00f351d5b3ac3d0e22e626ddad6', - '767778797b7c7d7e80818283858687888a8b8c8d8f909192', - 'ecb-tbl-192: I=22'), - ('aaf4ec8c1a815aeb826cab741339532c', '9cb566ff26d92dad083b51fdc18c173c', - '94959697999a9b9c9e9fa0a1a3a4a5a6a8a9aaabadaeafb0', - 'ecb-tbl-192: I=23'), - ('40be8c5d9108e663f38f1a2395279ecf', 'c9c82766176a9b228eb9a974a010b4fb', - 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebec', - 'ecb-tbl-192: I=24'), - ('0c8ad9bc32d43e04716753aa4cfbe351', 'd8e26aa02945881d5137f1c1e1386e88', - '2a2b2c2d2f30313234353637393a3b3c3e3f404143444546', - 'ecb-tbl-192: I=25'), - ('1407b1d5f87d63357c8dc7ebbaebbfee', 'c0e024ccd68ff5ffa4d139c355a77c55', - '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364', - 'ecb-tbl-192: I=26'), - ('e62734d1ae3378c4549e939e6f123416', '0b18b3d16f491619da338640df391d43', - '84858687898a8b8c8e8f90919394959698999a9b9d9e9fa0', - 'ecb-tbl-192: I=27'), - ('5a752cff2a176db1a1de77f2d2cdee41', 'dbe09ac8f66027bf20cb6e434f252efc', - 'a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', - 'ecb-tbl-192: I=28'), - ('a9c8c3a4eabedc80c64730ddd018cd88', '6d04e5e43c5b9cbe05feb9606b6480fe', - 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdc', - 'ecb-tbl-192: I=29'), - ('ee9b3dbbdb86180072130834d305999a', 'dd1d6553b96be526d9fee0fbd7176866', - '1a1b1c1d1f20212224252627292a2b2c2e2f303133343536', - 'ecb-tbl-192: I=30'), - ('a7fa8c3586b8ebde7568ead6f634a879', '0260ca7e3f979fd015b0dd4690e16d2a', - '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354', - 'ecb-tbl-192: I=31'), - ('37e0f4a87f127d45ac936fe7ad88c10a', '9893734de10edcc8a67c3b110b8b8cc6', - '929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', - 'ecb-tbl-192: I=32'), - ('3f77d8b5d92bac148e4e46f697a535c5', '93b30b750516b2d18808d710c2ee84ef', - '464748494b4c4d4e50515253555657585a5b5c5d5f606162', - 'ecb-tbl-192: I=33'), - ('d25ebb686c40f7e2c4da1014936571ca', '16f65fa47be3cb5e6dfe7c6c37016c0e', - '828384858788898a8c8d8e8f91929394969798999b9c9d9e', - 'ecb-tbl-192: I=34'), - ('4f1c769d1e5b0552c7eca84dea26a549', 'f3847210d5391e2360608e5acb560581', - 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbc', - 'ecb-tbl-192: I=35'), - ('8548e2f882d7584d0fafc54372b6633a', '8754462cd223366d0753913e6af2643d', - 'bebfc0c1c3c4c5c6c8c9cacbcdcecfd0d2d3d4d5d7d8d9da', - 'ecb-tbl-192: I=36'), - ('87d7a336cb476f177cd2a51af2a62cdf', '1ea20617468d1b806a1fd58145462017', - 'dcdddedfe1e2e3e4e6e7e8e9ebecedeef0f1f2f3f5f6f7f8', - 'ecb-tbl-192: I=37'), - ('03b1feac668c4e485c1065dfc22b44ee', '3b155d927355d737c6be9dda60136e2e', - 'fafbfcfdfe01000204050607090a0b0c0e0f101113141516', - 'ecb-tbl-192: I=38'), - ('bda15e66819fa72d653a6866aa287962', '26144f7b66daa91b6333dbd3850502b3', - '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334', - 'ecb-tbl-192: I=39'), - ('4d0c7a0d2505b80bf8b62ceb12467f0a', 'e4f9a4ab52ced8134c649bf319ebcc90', - '363738393b3c3d3e40414243454647484a4b4c4d4f505152', - 'ecb-tbl-192: I=40'), - ('626d34c9429b37211330986466b94e5f', 'b9ddd29ac6128a6cab121e34a4c62b36', - '54555657595a5b5c5e5f60616364656668696a6b6d6e6f70', - 'ecb-tbl-192: I=41'), - ('333c3e6bf00656b088a17e5ff0e7f60a', '6fcddad898f2ce4eff51294f5eaaf5c9', - '727374757778797a7c7d7e7f81828384868788898b8c8d8e', - 'ecb-tbl-192: I=42'), - 
('687ed0cdc0d2a2bc8c466d05ef9d2891', 'c9a6fe2bf4028080bea6f7fc417bd7e3', - '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabac', - 'ecb-tbl-192: I=43'), - ('487830e78cc56c1693e64b2a6660c7b6', '6a2026846d8609d60f298a9c0673127f', - 'aeafb0b1b3b4b5b6b8b9babbbdbebfc0c2c3c4c5c7c8c9ca', - 'ecb-tbl-192: I=44'), - ('7a48d6b7b52b29392aa2072a32b66160', '2cb25c005e26efea44336c4c97a4240b', - 'cccdcecfd1d2d3d4d6d7d8d9dbdcdddee0e1e2e3e5e6e7e8', - 'ecb-tbl-192: I=45'), - ('907320e64c8c5314d10f8d7a11c8618d', '496967ab8680ddd73d09a0e4c7dcc8aa', - 'eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', - 'ecb-tbl-192: I=46'), - ('b561f2ca2d6e65a4a98341f3ed9ff533', 'd5af94de93487d1f3a8c577cb84a66a4', - '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324', - 'ecb-tbl-192: I=47'), - ('df769380d212792d026f049e2e3e48ef', '84bdac569cae2828705f267cc8376e90', - '262728292b2c2d2e30313233353637383a3b3c3d3f404142', - 'ecb-tbl-192: I=48'), - ('79f374bc445bdabf8fccb8843d6054c6', 'f7401dda5ad5ab712b7eb5d10c6f99b6', - '44454647494a4b4c4e4f50515354555658595a5b5d5e5f60', - 'ecb-tbl-192: I=49'), - ('4e02f1242fa56b05c68dbae8fe44c9d6', '1c9d54318539ebd4c3b5b7e37bf119f0', - '626364656768696a6c6d6e6f71727374767778797b7c7d7e', - 'ecb-tbl-192: I=50'), - ('cf73c93cbff57ac635a6f4ad2a4a1545', 'aca572d65fb2764cffd4a6eca090ea0d', - '80818283858687888a8b8c8d8f90919294959697999a9b9c', - 'ecb-tbl-192: I=51'), - ('9923548e2875750725b886566784c625', '36d9c627b8c2a886a10ccb36eae3dfbb', - '9e9fa0a1a3a4a5a6a8a9aaabadaeafb0b2b3b4b5b7b8b9ba', - 'ecb-tbl-192: I=52'), - ('4888336b723a022c9545320f836a4207', '010edbf5981e143a81d646e597a4a568', - 'bcbdbebfc1c2c3c4c6c7c8c9cbcccdced0d1d2d3d5d6d7d8', - 'ecb-tbl-192: I=53'), - ('f84d9a5561b0608b1160dee000c41ba8', '8db44d538dc20cc2f40f3067fd298e60', - 'dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', - 'ecb-tbl-192: I=54'), - ('c23192a0418e30a19b45ae3e3625bf22', '930eb53bc71e6ac4b82972bdcd5aafb3', - 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314', - 'ecb-tbl-192: I=55'), - ('b84e0690b28b0025381ad82a15e501a7', '6c42a81edcbc9517ccd89c30c95597b4', - '161718191b1c1d1e20212223252627282a2b2c2d2f303132', - 'ecb-tbl-192: I=56'), - ('acef5e5c108876c4f06269f865b8f0b0', 'da389847ad06df19d76ee119c71e1dd3', - '34353637393a3b3c3e3f40414344454648494a4b4d4e4f50', - 'ecb-tbl-192: I=57'), - ('0f1b3603e0f5ddea4548246153a5e064', 'e018fdae13d3118f9a5d1a647a3f0462', - '525354555758595a5c5d5e5f61626364666768696b6c6d6e', - 'ecb-tbl-192: I=58'), - ('fbb63893450d42b58c6d88cd3c1809e3', '2aa65db36264239d3846180fabdfad20', - '70717273757677787a7b7c7d7f80818284858687898a8b8c', - 'ecb-tbl-192: I=59'), - ('4bef736df150259dae0c91354e8a5f92', '1472163e9a4f780f1ceb44b07ecf4fdb', - '8e8f90919394959698999a9b9d9e9fa0a2a3a4a5a7a8a9aa', - 'ecb-tbl-192: I=60'), - ('7d2d46242056ef13d3c3fc93c128f4c7', 'c8273fdc8f3a9f72e91097614b62397c', - 'acadaeafb1b2b3b4b6b7b8b9bbbcbdbec0c1c2c3c5c6c7c8', - 'ecb-tbl-192: I=61'), - ('e9c1ba2df415657a256edb33934680fd', '66c8427dcd733aaf7b3470cb7d976e3f', - 'cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', - 'ecb-tbl-192: I=62'), - ('e23ee277b0aa0a1dfb81f7527c3514f1', '146131cb17f1424d4f8da91e6f80c1d0', - 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304', - 'ecb-tbl-192: I=63'), - ('3e7445b0b63caaf75e4a911e12106b4c', '2610d0ad83659081ae085266a88770dc', - '060708090b0c0d0e10111213151617181a1b1c1d1f202122', - 'ecb-tbl-192: I=64'), - ('767774752023222544455a5be6e1e0e3', '38a2b5a974b0575c5d733917fb0d4570', - '24252627292a2b2c2e2f30313334353638393a3b3d3e3f40', - 'ecb-tbl-192: I=65'), - 
('72737475717e7f7ce9e8ebea696a6b6c', 'e21d401ebc60de20d6c486e4f39a588b', - '424344454748494a4c4d4e4f51525354565758595b5c5d5e', - 'ecb-tbl-192: I=66'), - ('dfdedddc25262728c9c8cfcef1eeefec', 'e51d5f88c670b079c0ca1f0c2c4405a2', - '60616263656667686a6b6c6d6f70717274757677797a7b7c', - 'ecb-tbl-192: I=67'), - ('fffe0100707776755f5e5d5c7675746b', '246a94788a642fb3d1b823c8762380c8', - '7e7f80818384858688898a8b8d8e8f90929394959798999a', - 'ecb-tbl-192: I=68'), - ('e0e1e2e3424140479f9e9190292e2f2c', 'b80c391c5c41a4c3b30c68e0e3d7550f', - '9c9d9e9fa1a2a3a4a6a7a8a9abacadaeb0b1b2b3b5b6b7b8', - 'ecb-tbl-192: I=69'), - ('2120272690efeeed3b3a39384e4d4c4b', 'b77c4754fc64eb9a1154a9af0bb1f21c', - 'babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', - 'ecb-tbl-192: I=70'), - ('ecedeeef5350516ea1a0a7a6a3acadae', 'fb554de520d159a06bf219fc7f34a02f', - 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4', - 'ecb-tbl-192: I=71'), - ('32333c3d25222320e9e8ebeacecdccc3', 'a89fba152d76b4927beed160ddb76c57', - 'f6f7f8f9fbfcfdfe00010203050607080a0b0c0d0f101112', - 'ecb-tbl-192: I=72'), - ('40414243626160678a8bb4b511161714', '5676eab4a98d2e8473b3f3d46424247c', - '14151617191a1b1c1e1f20212324252628292a2b2d2e2f30', - 'ecb-tbl-192: I=73'), - ('94959293f5fafbf81f1e1d1c7c7f7e79', '4e8f068bd7ede52a639036ec86c33568', - '323334353738393a3c3d3e3f41424344464748494b4c4d4e', - 'ecb-tbl-192: I=74'), - ('bebfbcbd191a1b14cfcec9c8546b6a69', 'f0193c4d7aff1791ee4c07eb4a1824fc', - '50515253555657585a5b5c5d5f60616264656667696a6b6c', - 'ecb-tbl-192: I=75'), - ('2c2d3233898e8f8cbbbab9b8333031ce', 'ac8686eeca9ba761afe82d67b928c33f', - '6e6f70717374757678797a7b7d7e7f80828384858788898a', - 'ecb-tbl-192: I=76'), - ('84858687bfbcbdba37363938fdfafbf8', '5faf8573e33b145b6a369cd3606ab2c9', - '8c8d8e8f91929394969798999b9c9d9ea0a1a2a3a5a6a7a8', - 'ecb-tbl-192: I=77'), - ('828384857669686b909192930b08090e', '31587e9944ab1c16b844ecad0df2e7da', - 'aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', - 'ecb-tbl-192: I=78'), - ('bebfbcbd9695948b707176779e919093', 'd017fecd91148aba37f6f3068aa67d8a', - 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4', - 'ecb-tbl-192: I=79'), - ('8b8a85846067666521202322d0d3d2dd', '788ef2f021a73cba2794b616078a8500', - 'e6e7e8e9ebecedeef0f1f2f3f5f6f7f8fafbfcfdfe010002', - 'ecb-tbl-192: I=80'), - ('76777475f1f2f3f4f8f9e6e777707172', '5d1ef20dced6bcbc12131ac7c54788aa', - '04050607090a0b0c0e0f10111314151618191a1b1d1e1f20', - 'ecb-tbl-192: I=81'), - ('a4a5a2a34f404142b4b5b6b727242522', 'b3c8cf961faf9ea05fdde6d1e4d8f663', - '222324252728292a2c2d2e2f31323334363738393b3c3d3e', - 'ecb-tbl-192: I=82'), - ('94959697e1e2e3ec16171011839c9d9e', '143075c70605861c7fac6526199e459f', - '40414243454647484a4b4c4d4f50515254555657595a5b5c', - 'ecb-tbl-192: I=83'), - ('03023d3c06010003dedfdcddfffcfde2', 'a5ae12eade9a87268d898bfc8fc0252a', - '5e5f60616364656668696a6b6d6e6f70727374757778797a', - 'ecb-tbl-192: I=84'), - ('10111213f1f2f3f4cecfc0c1dbdcddde', '0924f7cf2e877a4819f5244a360dcea9', - '7c7d7e7f81828384868788898b8c8d8e9091929395969798', - 'ecb-tbl-192: I=85'), - ('67666160724d4c4f1d1c1f1e73707176', '3d9e9635afcc3e291cc7ab3f27d1c99a', - '9a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', - 'ecb-tbl-192: I=86'), - ('e6e7e4e5a8abaad584858283909f9e9d', '9d80feebf87510e2b8fb98bb54fd788c', - 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4', - 'ecb-tbl-192: I=87'), - ('71707f7e565150537d7c7f7e6162636c', '5f9d1a082a1a37985f174002eca01309', - 'd6d7d8d9dbdcdddee0e1e2e3e5e6e7e8eaebecedeff0f1f2', - 'ecb-tbl-192: I=88'), - 
('64656667212223245555aaaa03040506', 'a390ebb1d1403930184a44b4876646e4', - 'f4f5f6f7f9fafbfcfefe01010304050608090a0b0d0e0f10', - 'ecb-tbl-192: I=89'), - ('9e9f9899aba4a5a6cfcecdcc2b28292e', '700fe918981c3195bb6c4bcb46b74e29', - '121314151718191a1c1d1e1f21222324262728292b2c2d2e', - 'ecb-tbl-192: I=90'), - ('c7c6c5c4d1d2d3dc626364653a454447', '907984406f7bf2d17fb1eb15b673d747', - '30313233353637383a3b3c3d3f40414244454647494a4b4c', - 'ecb-tbl-192: I=91'), - ('f6f7e8e9e0e7e6e51d1c1f1e5b585966', 'c32a956dcfc875c2ac7c7cc8b8cc26e1', - '4e4f50515354555658595a5b5d5e5f60626364656768696a', - 'ecb-tbl-192: I=92'), - ('bcbdbebf5d5e5f5868696667f4f3f2f1', '02646e2ebfa9b820cf8424e9b9b6eb51', - '6c6d6e6f71727374767778797b7c7d7e8081828385868788', - 'ecb-tbl-192: I=93'), - ('40414647b0afaead9b9a99989b98999e', '621fda3a5bbd54c6d3c685816bd4ead8', - '8a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', - 'ecb-tbl-192: I=94'), - ('69686b6a0201001f0f0e0908b4bbbab9', 'd4e216040426dfaf18b152469bc5ac2f', - 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4', - 'ecb-tbl-192: I=95'), - ('c7c6c9c8d8dfdedd5a5b5859bebdbcb3', '9d0635b9d33b6cdbd71f5d246ea17cc8', - 'c6c7c8c9cbcccdced0d1d2d3d5d6d7d8dadbdcdddfe0e1e2', - 'ecb-tbl-192: I=96'), - ('dedfdcdd787b7a7dfffee1e0b2b5b4b7', '10abad1bd9bae5448808765583a2cc1a', - 'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6f8f9fafbfdfefe00', - 'ecb-tbl-192: I=97'), - ('4d4c4b4a606f6e6dd0d1d2d3fbf8f9fe', '6891889e16544e355ff65a793c39c9a8', - '020304050708090a0c0d0e0f11121314161718191b1c1d1e', - 'ecb-tbl-192: I=98'), - ('b7b6b5b4d7d4d5dae5e4e3e2e1fefffc', 'cc735582e68072c163cd9ddf46b91279', - '20212223252627282a2b2c2d2f30313234353637393a3b3c', - 'ecb-tbl-192: I=99'), - ('cecfb0b1f7f0f1f2aeafacad3e3d3c23', 'c5c68b9aeeb7f878df578efa562f9574', - '3e3f40414344454648494a4b4d4e4f50525354555758595a', - 'ecb-tbl-192: I=100'), - ('cacbc8c9cdcecfc812131c1d494e4f4c', '5f4764395a667a47d73452955d0d2ce8', - '5c5d5e5f61626364666768696b6c6d6e7071727375767778', - 'ecb-tbl-192: I=101'), - ('9d9c9b9ad22d2c2fb1b0b3b20c0f0e09', '701448331f66106cefddf1eb8267c357', - '7a7b7c7d7f80818284858687898a8b8c8e8f909193949596', - 'ecb-tbl-192: I=102'), - ('7a7b787964676659959493924f404142', 'cb3ee56d2e14b4e1941666f13379d657', - '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4', - 'ecb-tbl-192: I=103'), - ('aaaba4a5cec9c8cb1f1e1d1caba8a9a6', '9fe16efd18ab6e1981191851fedb0764', - 'b6b7b8b9bbbcbdbec0c1c2c3c5c6c7c8cacbcccdcfd0d1d2', - 'ecb-tbl-192: I=104'), - ('93929190282b2a2dc4c5fafb92959497', '3dc9ba24e1b223589b147adceb4c8e48', - 'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6e8e9eaebedeeeff0', - 'ecb-tbl-192: I=105'), - ('efeee9e8ded1d0d339383b3a888b8a8d', '1c333032682e7d4de5e5afc05c3e483c', - 'f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', - 'ecb-tbl-192: I=106'), - ('7f7e7d7ca2a1a0af78797e7f112e2f2c', 'd593cc99a95afef7e92038e05a59d00a', - '10111213151617181a1b1c1d1f20212224252627292a2b2c', - 'ecb-tbl-192: I=107'), - ('84859a9b2b2c2d2e868784852625245b', '51e7f96f53b4353923452c222134e1ec', - '2e2f30313334353638393a3b3d3e3f40424344454748494a', - 'ecb-tbl-192: I=108'), - ('b0b1b2b3070405026869666710171615', '4075b357a1a2b473400c3b25f32f81a4', - '4c4d4e4f51525354565758595b5c5d5e6061626365666768', - 'ecb-tbl-192: I=109'), - ('acadaaabbda2a3a00d0c0f0e595a5b5c', '302e341a3ebcd74f0d55f61714570284', - '6a6b6c6d6f70717274757677797a7b7c7e7f808183848586', - 'ecb-tbl-192: I=110'), - ('121310115655544b5253545569666764', '57abdd8231280da01c5042b78cf76522', - '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4', - 'ecb-tbl-192: I=111'), - 
('dedfd0d166616063eaebe8e94142434c', '17f9ea7eea17ac1adf0e190fef799e92', - 'a6a7a8a9abacadaeb0b1b2b3b5b6b7b8babbbcbdbfc0c1c2', - 'ecb-tbl-192: I=112'), - ('dbdad9d81417161166677879e0e7e6e5', '2e1bdd563dd87ee5c338dd6d098d0a7a', - 'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6d8d9dadbdddedfe0', - 'ecb-tbl-192: I=113'), - ('6a6b6c6de0efeeed2b2a2928c0c3c2c5', 'eb869996e6f8bfb2bfdd9e0c4504dbb2', - 'e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', - 'ecb-tbl-192: I=114'), - ('b1b0b3b21714151a1a1b1c1d5649484b', 'c2e01549e9decf317468b3e018c61ba8', - '00010203050607080a0b0c0d0f10111214151617191a1b1c', - 'ecb-tbl-192: I=115'), - ('39380706a3a4a5a6c4c5c6c77271706f', '8da875d033c01dd463b244a1770f4a22', - '1e1f20212324252628292a2b2d2e2f30323334353738393a', - 'ecb-tbl-192: I=116'), - ('5c5d5e5f1013121539383736e2e5e4e7', '8ba0dcf3a186844f026d022f8839d696', - '3c3d3e3f41424344464748494b4c4d4e5051525355565758', - 'ecb-tbl-192: I=117'), - ('43424544ead5d4d72e2f2c2d64676661', 'e9691ff9a6cc6970e51670a0fd5b88c1', - '5a5b5c5d5f60616264656667696a6b6c6e6f707173747576', - 'ecb-tbl-192: I=118'), - ('55545756989b9a65f8f9feff18171615', 'f2baec06faeed30f88ee63ba081a6e5b', - '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394', - 'ecb-tbl-192: I=119'), - ('05040b0a525554573c3d3e3f4a494847', '9c39d4c459ae5753394d6094adc21e78', - '969798999b9c9d9ea0a1a2a3a5a6a7a8aaabacadafb0b1b2', - 'ecb-tbl-192: I=120'), - ('14151617595a5b5c8584fbfa8e89888b', '6345b532a11904502ea43ba99c6bd2b2', - 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6c8c9cacbcdcecfd0', - 'ecb-tbl-192: I=121'), - ('7c7d7a7bfdf2f3f029282b2a51525354', '5ffae3061a95172e4070cedce1e428c8', - 'd2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', - 'ecb-tbl-192: I=122'), - ('38393a3b1e1d1c1341404746c23d3c3e', '0a4566be4cdf9adce5dec865b5ab34cd', - 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c', - 'ecb-tbl-192: I=123'), - ('8d8c939240474645818083827c7f7e41', 'ca17fcce79b7404f2559b22928f126fb', - '0e0f10111314151618191a1b1d1e1f20222324252728292a', - 'ecb-tbl-192: I=124'), - ('3b3a39381a19181f32333c3d45424340', '97ca39b849ed73a6470a97c821d82f58', - '2c2d2e2f31323334363738393b3c3d3e4041424345464748', - 'ecb-tbl-192: I=125'), - ('f0f1f6f738272625828380817f7c7d7a', '8198cb06bc684c6d3e9b7989428dcf7a', - '4a4b4c4d4f50515254555657595a5b5c5e5f606163646566', - 'ecb-tbl-192: I=126'), - ('89888b8a0407061966676061141b1a19', 'f53c464c705ee0f28d9a4c59374928bd', - '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384', - 'ecb-tbl-192: I=127'), - ('d3d2dddcaaadacaf9c9d9e9fe8ebeae5', '9adb3d4cca559bb98c3e2ed73dbf1154', - '868788898b8c8d8e90919293959697989a9b9c9d9fa0a1a2', - 'ecb-tbl-192: I=128'), - - # ecb_tbl.txt, KEYSIZE=256 - ('834eadfccac7e1b30664b1aba44815ab', '1946dabf6a03a2a2c3d0b05080aed6fc', - '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', - 'ecb-tbl-256: I=1'), - ('d9dc4dba3021b05d67c0518f72b62bf1', '5ed301d747d3cc715445ebdec62f2fb4', - '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', - 'ecb-tbl-256: I=2'), - ('a291d86301a4a739f7392173aa3c604c', '6585c8f43d13a6beab6419fc5935b9d0', - '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', - 'ecb-tbl-256: I=3'), - ('4264b2696498de4df79788a9f83e9390', '2a5b56a596680fcc0e05f5e0f151ecae', - '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', - 'ecb-tbl-256: I=4'), - ('ee9932b3721804d5a83ef5949245b6f6', 'f5d6ff414fd2c6181494d20c37f2b8c4', - 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', - 'ecb-tbl-256: I=5'), - ('e6248f55c5fdcbca9cbbb01c88a2ea77', 
'85399c01f59fffb5204f19f8482f00b8', - 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', - 'ecb-tbl-256: I=6'), - ('b8358e41b9dff65fd461d55a99266247', '92097b4c88a041ddf98144bc8d22e8e7', - 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', - 'ecb-tbl-256: I=7'), - ('f0e2d72260af58e21e015ab3a4c0d906', '89bd5b73b356ab412aef9f76cea2d65c', - '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', - 'ecb-tbl-256: I=8'), - ('475b8b823ce8893db3c44a9f2a379ff7', '2536969093c55ff9454692f2fac2f530', - '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', - 'ecb-tbl-256: I=9'), - ('688f5281945812862f5f3076cf80412f', '07fc76a872843f3f6e0081ee9396d637', - '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', - 'ecb-tbl-256: I=10'), - ('08d1d2bc750af553365d35e75afaceaa', 'e38ba8ec2aa741358dcc93e8f141c491', - '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', - 'ecb-tbl-256: I=11'), - ('8707121f47cc3efceca5f9a8474950a1', 'd028ee23e4a89075d0b03e868d7d3a42', - 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', - 'ecb-tbl-256: I=12'), - ('e51aa0b135dba566939c3b6359a980c5', '8cd9423dfc459e547155c5d1d522e540', - 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', - 'ecb-tbl-256: I=13'), - ('069a007fc76a459f98baf917fedf9521', '080e9517eb1677719acf728086040ae3', - '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', - 'ecb-tbl-256: I=14'), - ('726165c1723fbcf6c026d7d00b091027', '7c1700211a3991fc0ecded0ab3e576b0', - '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', - 'ecb-tbl-256: I=15'), - ('d7c544de91d55cfcde1f84ca382200ce', 'dabcbcc855839251db51e224fbe87435', - '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', - 'ecb-tbl-256: I=16'), - ('fed3c9a161b9b5b2bd611b41dc9da357', '68d56fad0406947a4dd27a7448c10f1d', - '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', - 'ecb-tbl-256: I=17'), - ('4f634cdc6551043409f30b635832cf82', 'da9a11479844d1ffee24bbf3719a9925', - 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', - 'ecb-tbl-256: I=18'), - ('109ce98db0dfb36734d9f3394711b4e6', '5e4ba572f8d23e738da9b05ba24b8d81', - 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', - 'ecb-tbl-256: I=19'), - ('4ea6dfaba2d8a02ffdffa89835987242', 'a115a2065d667e3f0b883837a6e903f8', - '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', - 'ecb-tbl-256: I=20'), - ('5ae094f54af58e6e3cdbf976dac6d9ef', '3e9e90dc33eac2437d86ad30b137e66e', - '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', - 'ecb-tbl-256: I=21'), - ('764d8e8e0f29926dbe5122e66354fdbe', '01ce82d8fbcdae824cb3c48e495c3692', - 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', - 'ecb-tbl-256: I=22'), - ('3f0418f888cdf29a982bf6b75410d6a9', '0c9cff163ce936faaf083cfd3dea3117', - 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', - 'ecb-tbl-256: I=23'), - ('e4a3e7cb12cdd56aa4a75197a9530220', '5131ba9bd48f2bba85560680df504b52', - '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', - 'ecb-tbl-256: I=24'), - ('211677684aac1ec1a160f44c4ebf3f26', '9dc503bbf09823aec8a977a5ad26ccb2', - '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', - 'ecb-tbl-256: I=25'), - ('d21e439ff749ac8f18d6d4b105e03895', '9a6db0c0862e506a9e397225884041d7', - '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', - 'ecb-tbl-256: I=26'), - ('d9f6ff44646c4725bd4c0103ff5552a7', 
'430bf9570804185e1ab6365fc6a6860c', - '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', - 'ecb-tbl-256: I=27'), - ('0b1256c2a00b976250cfc5b0c37ed382', '3525ebc02f4886e6a5a3762813e8ce8a', - 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', - 'ecb-tbl-256: I=28'), - ('b056447ffc6dc4523a36cc2e972a3a79', '07fa265c763779cce224c7bad671027b', - 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', - 'ecb-tbl-256: I=29'), - ('5e25ca78f0de55802524d38da3fe4456', 'e8b72b4e8be243438c9fff1f0e205872', - '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', - 'ecb-tbl-256: I=30'), - ('a5bcf4728fa5eaad8567c0dc24675f83', '109d4f999a0e11ace1f05e6b22cbcb50', - '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', - 'ecb-tbl-256: I=31'), - ('814e59f97ed84646b78b2ca022e9ca43', '45a5e8d4c3ed58403ff08d68a0cc4029', - '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', - 'ecb-tbl-256: I=32'), - ('15478beec58f4775c7a7f5d4395514d7', '196865964db3d417b6bd4d586bcb7634', - '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', - 'ecb-tbl-256: I=33'), - ('253548ffca461c67c8cbc78cd59f4756', '60436ad45ac7d30d99195f815d98d2ae', - 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', - 'ecb-tbl-256: I=34'), - ('fd7ad8d73b9b0f8cc41600640f503d65', 'bb07a23f0b61014b197620c185e2cd75', - 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', - 'ecb-tbl-256: I=35'), - ('06199de52c6cbf8af954cd65830bcd56', '5bc0b2850129c854423aff0751fe343b', - 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', - 'ecb-tbl-256: I=36'), - ('f17c4ffe48e44c61bd891e257e725794', '7541a78f96738e6417d2a24bd2beca40', - '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', - 'ecb-tbl-256: I=37'), - ('9a5b4a402a3e8a59be6bf5cd8154f029', 'b0a303054412882e464591f1546c5b9e', - '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', - 'ecb-tbl-256: I=38'), - ('79bd40b91a7e07dc939d441782ae6b17', '778c06d8a355eeee214fcea14b4e0eef', - '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', - 'ecb-tbl-256: I=39'), - ('d8ceaaf8976e5fbe1012d8c84f323799', '09614206d15cbace63227d06db6beebb', - '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', - 'ecb-tbl-256: I=40'), - ('3316e2751e2e388b083da23dd6ac3fbe', '41b97fb20e427a9fdbbb358d9262255d', - 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', - 'ecb-tbl-256: I=41'), - ('8b7cfbe37de7dca793521819242c5816', 'c1940f703d845f957652c2d64abd7adf', - 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', - 'ecb-tbl-256: I=42'), - ('f23f033c0eebf8ec55752662fd58ce68', 'd2d44fcdae5332343366db297efcf21b', - '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', - 'ecb-tbl-256: I=43'), - ('59eb34f6c8bdbacc5fc6ad73a59a1301', 'ea8196b79dbe167b6aa9896e287eed2b', - '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', - 'ecb-tbl-256: I=44'), - ('dcde8b6bd5cf7cc22d9505e3ce81261a', 'd6b0b0c4ba6c7dbe5ed467a1e3f06c2d', - '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', - 'ecb-tbl-256: I=45'), - ('e33cf7e524fed781e7042ff9f4b35dc7', 'ec51eb295250c22c2fb01816fb72bcae', - '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', - 'ecb-tbl-256: I=46'), - ('27963c8facdf73062867d164df6d064c', 'aded6630a07ce9c7408a155d3bd0d36f', - 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', - 'ecb-tbl-256: I=47'), - ('77b1ce386b551b995f2f2a1da994eef8', 
'697c9245b9937f32f5d1c82319f0363a', - 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', - 'ecb-tbl-256: I=48'), - ('f083388b013679efcf0bb9b15d52ae5c', 'aad5ad50c6262aaec30541a1b7b5b19c', - 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e', - 'ecb-tbl-256: I=49'), - ('c5009e0dab55db0abdb636f2600290c8', '7d34b893855341ec625bd6875ac18c0d', - '20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546', - 'ecb-tbl-256: I=50'), - ('7804881e26cd532d8514d3683f00f1b9', '7ef05105440f83862f5d780e88f02b41', - '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e', - 'ecb-tbl-256: I=51'), - ('46cddcd73d1eb53e675ca012870a92a3', 'c377c06403382061af2c9c93a8e70df6', - '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', - 'ecb-tbl-256: I=52'), - ('a9fb44062bb07fe130a8e8299eacb1ab', '1dbdb3ffdc052dacc83318853abc6de5', - '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', - 'ecb-tbl-256: I=53'), - ('2b6ff8d7a5cc3a28a22d5a6f221af26b', '69a6eab00432517d0bf483c91c0963c7', - 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', - 'ecb-tbl-256: I=54'), - ('1a9527c29b8add4b0e3e656dbb2af8b4', '0797f41dc217c80446e1d514bd6ab197', - 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', - 'ecb-tbl-256: I=55'), - ('7f99cf2c75244df015eb4b0c1050aeae', '9dfd76575902a637c01343c58e011a03', - '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', - 'ecb-tbl-256: I=56'), - ('e84ff85b0d9454071909c1381646c4ed', 'acf4328ae78f34b9fa9b459747cc2658', - '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', - 'ecb-tbl-256: I=57'), - ('89afd40f99521280d5399b12404f6db4', 'b0479aea12bac4fe2384cf98995150c6', - '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', - 'ecb-tbl-256: I=58'), - ('a09ef32dbc5119a35ab7fa38656f0329', '9dd52789efe3ffb99f33b3da5030109a', - '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', - 'ecb-tbl-256: I=59'), - ('61773457f068c376c7829b93e696e716', 'abbb755e4621ef8f1214c19f649fb9fd', - 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', - 'ecb-tbl-256: I=60'), - ('a34f0cae726cce41dd498747d891b967', 'da27fb8174357bce2bed0e7354f380f9', - 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', - 'ecb-tbl-256: I=61'), - ('856f59496c7388ee2d2b1a27b7697847', 'c59a0663f0993838f6e5856593bdc5ef', - '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', - 'ecb-tbl-256: I=62'), - ('cb090c593ef7720bd95908fb93b49df4', 'ed60b264b5213e831607a99c0ce5e57e', - '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', - 'ecb-tbl-256: I=63'), - ('a0ac75cd2f1923d460fc4d457ad95baf', 'e50548746846f3eb77b8c520640884ed', - '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', - 'ecb-tbl-256: I=64'), - ('2a2b282974777689e8e9eeef525d5c5f', '28282cc7d21d6a2923641e52d188ef0c', - '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', - 'ecb-tbl-256: I=65'), - ('909192939390919e0f0e09089788898a', '0dfa5b02abb18e5a815305216d6d4f8e', - 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', - 'ecb-tbl-256: I=66'), - ('777675748d8e8f907170777649464744', '7359635c0eecefe31d673395fb46fb99', - 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', - 'ecb-tbl-256: I=67'), - ('717073720605040b2d2c2b2a05fafbf9', '73c679f7d5aef2745c9737bb4c47fb36', - 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', - 'ecb-tbl-256: I=68'), - ('64656667fefdfcc31b1a1d1ca5aaaba8', 
'b192bd472a4d2eafb786e97458967626', - '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', - 'ecb-tbl-256: I=69'), - ('dbdad9d86a696867b5b4b3b2c8d7d6d5', '0ec327f6c8a2b147598ca3fde61dc6a4', - '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', - 'ecb-tbl-256: I=70'), - ('5c5d5e5fe3e0e1fe31303736333c3d3e', 'fc418eb3c41b859b38d4b6f646629729', - '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', - 'ecb-tbl-256: I=71'), - ('545556574b48494673727574546b6a69', '30249e5ac282b1c981ea64b609f3a154', - '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', - 'ecb-tbl-256: I=72'), - ('ecedeeefc6c5c4bb56575051f5fafbf8', '5e6e08646d12150776bb43c2d78a9703', - 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', - 'ecb-tbl-256: I=73'), - ('464744452724252ac9c8cfced2cdcccf', 'faeb3d5de652cd3447dceb343f30394a', - 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', - 'ecb-tbl-256: I=74'), - ('e6e7e4e54142435c878681801c131211', 'a8e88706823f6993ef80d05c1c7b2cf0', - '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', - 'ecb-tbl-256: I=75'), - ('72737071cfcccdc2f9f8fffe710e0f0c', '8ced86677e6e00a1a1b15968f2d3cce6', - '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', - 'ecb-tbl-256: I=76'), - ('505152537370714ec3c2c5c4010e0f0c', '9fc7c23858be03bdebb84e90db6786a9', - '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', - 'ecb-tbl-256: I=77'), - ('a8a9aaab5c5f5e51aeafa8a93d222320', 'b4fbd65b33f70d8cf7f1111ac4649c36', - '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', - 'ecb-tbl-256: I=78'), - ('dedfdcddf6f5f4eb10111617fef1f0f3', 'c5c32d5ed03c4b53cc8c1bd0ef0dbbf6', - 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', - 'ecb-tbl-256: I=79'), - ('bdbcbfbe5e5d5c530b0a0d0cfac5c4c7', 'd1a7f03b773e5c212464b63709c6a891', - 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', - 'ecb-tbl-256: I=80'), - ('8a8b8889050606f8f4f5f2f3636c6d6e', '6b7161d8745947ac6950438ea138d028', - 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e', - 'ecb-tbl-256: I=81'), - ('a6a7a4a54d4e4f40b2b3b4b539262724', 'fd47a9f7e366ee7a09bc508b00460661', - '20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546', - 'ecb-tbl-256: I=82'), - ('9c9d9e9fe9eaebf40e0f08099b949596', '00d40b003dc3a0d9310b659b98c7e416', - '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e', - 'ecb-tbl-256: I=83'), - ('2d2c2f2e1013121dcccdcacbed121310', 'eea4c79dcc8e2bda691f20ac48be0717', - '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', - 'ecb-tbl-256: I=84'), - ('f4f5f6f7edeeefd0eaebecedf7f8f9fa', 'e78f43b11c204403e5751f89d05a2509', - '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', - 'ecb-tbl-256: I=85'), - ('3d3c3f3e282b2a2573727574150a0b08', 'd0f0e3d1f1244bb979931e38dd1786ef', - 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', - 'ecb-tbl-256: I=86'), - ('b6b7b4b5f8fbfae5b4b5b2b3a0afaead', '042e639dc4e1e4dde7b75b749ea6f765', - 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', - 'ecb-tbl-256: I=87'), - ('b7b6b5b4989b9a95878681809ba4a5a6', 'bc032fdd0efe29503a980a7d07ab46a8', - '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', - 'ecb-tbl-256: I=88'), - ('a8a9aaabe5e6e798e9e8efee4748494a', '0c93ac949c0da6446effb86183b6c910', - '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', - 'ecb-tbl-256: I=89'), - ('ecedeeefd9dadbd4b9b8bfbe657a7b78', 
'e0d343e14da75c917b4a5cec4810d7c2', - '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', - 'ecb-tbl-256: I=90'), - ('7f7e7d7c696a6b74cacbcccd929d9c9f', '0eafb821748408279b937b626792e619', - '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', - 'ecb-tbl-256: I=91'), - ('08090a0b0605040bfffef9f8b9c6c7c4', 'fa1ac6e02d23b106a1fef18b274a553f', - 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', - 'ecb-tbl-256: I=92'), - ('08090a0bf1f2f3ccfcfdfafb68676665', '0dadfe019cd12368075507df33c1a1e9', - 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', - 'ecb-tbl-256: I=93'), - ('cacbc8c93a393837050403020d121310', '3a0879b414465d9ffbaf86b33a63a1b9', - '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', - 'ecb-tbl-256: I=94'), - ('e9e8ebea8281809f8f8e8988343b3a39', '62199fadc76d0be1805d3ba0b7d914bf', - '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', - 'ecb-tbl-256: I=95'), - ('515053524645444bd0d1d6d7340b0a09', '1b06d6c5d333e742730130cf78e719b4', - '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', - 'ecb-tbl-256: I=96'), - ('42434041ecefee1193929594c6c9c8cb', 'f1f848824c32e9dcdcbf21580f069329', - '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', - 'ecb-tbl-256: I=97'), - ('efeeedecc2c1c0cf76777071455a5b58', '1a09050cbd684f784d8e965e0782f28a', - 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', - 'ecb-tbl-256: I=98'), - ('5f5e5d5c3f3c3d221d1c1b1a19161714', '79c2969e7ded2ba7d088f3f320692360', - 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', - 'ecb-tbl-256: I=99'), - ('000102034142434c1c1d1a1b8d727371', '091a658a2f7444c16accb669450c7b63', - 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', - 'ecb-tbl-256: I=100'), - ('8e8f8c8db1b2b38c56575051050a0b08', '97c1e3a72cca65fa977d5ed0e8a7bbfc', - '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', - 'ecb-tbl-256: I=101'), - ('a7a6a5a4e8ebeae57f7e7978cad5d4d7', '70c430c6db9a17828937305a2df91a2a', - '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', - 'ecb-tbl-256: I=102'), - ('8a8b888994979689454443429f909192', '629553457fbe2479098571c7c903fde8', - '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', - 'ecb-tbl-256: I=103'), - ('8c8d8e8fe0e3e2ed45444342f1cecfcc', 'a25b25a61f612669e7d91265c7d476ba', - '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', - 'ecb-tbl-256: I=104'), - ('fffefdfc4c4f4e31d8d9dedfb6b9b8bb', 'eb7e4e49b8ae0f024570dda293254fed', - 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', - 'ecb-tbl-256: I=105'), - ('fdfcfffecccfcec12f2e29286679787b', '38fe15d61cca84516e924adce5014f67', - 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', - 'ecb-tbl-256: I=106'), - ('67666564bab9b8a77071767719161714', '3ad208492249108c9f3ebeb167ad0583', - '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', - 'ecb-tbl-256: I=107'), - ('9a9b98992d2e2f2084858283245b5a59', '299ba9f9bf5ab05c3580fc26edd1ed12', - '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', - 'ecb-tbl-256: I=108'), - ('a4a5a6a70b0809365c5d5a5b2c232221', '19dc705b857a60fb07717b2ea5717781', - '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', - 'ecb-tbl-256: I=109'), - ('464744455754555af3f2f5f4afb0b1b2', 'ffc8aeb885b5efcad06b6dbebf92e76b', - '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', - 'ecb-tbl-256: I=110'), - 
('323330317675746b7273747549464744', 'f58900c5e0b385253ff2546250a0142b', - 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', - 'ecb-tbl-256: I=111'), - ('a8a9aaab181b1a15808186872b141516', '2ee67b56280bc462429cee6e3370cbc1', - 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', - 'ecb-tbl-256: I=112'), - ('e7e6e5e4202323ddaaabacad343b3a39', '20db650a9c8e9a84ab4d25f7edc8f03f', - 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e', - 'ecb-tbl-256: I=113'), - ('a8a9aaab2221202fedecebea1e010003', '3c36da169525cf818843805f25b78ae5', - '20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546', - 'ecb-tbl-256: I=114'), - ('f9f8fbfa5f5c5d42424344450e010003', '9a781d960db9e45e37779042fea51922', - '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e', - 'ecb-tbl-256: I=115'), - ('57565554f5f6f7f89697909120dfdedd', '6560395ec269c672a3c288226efdba77', - '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', - 'ecb-tbl-256: I=116'), - ('f8f9fafbcccfcef1dddcdbda0e010003', '8c772b7a189ac544453d5916ebb27b9a', - '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', - 'ecb-tbl-256: I=117'), - ('d9d8dbda7073727d80818687c2dddcdf', '77ca5468cc48e843d05f78eed9d6578f', - 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', - 'ecb-tbl-256: I=118'), - ('c5c4c7c6080b0a1588898e8f68676665', '72cdcc71dc82c60d4429c9e2d8195baa', - 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', - 'ecb-tbl-256: I=119'), - ('83828180dcdfded186878081f0cfcecd', '8080d68ce60e94b40b5b8b69eeb35afa', - '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', - 'ecb-tbl-256: I=120'), - ('98999a9bdddedfa079787f7e0a050407', '44222d3cde299c04369d58ac0eba1e8e', - '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', - 'ecb-tbl-256: I=121'), - ('cecfcccd4f4c4d429f9e9998dfc0c1c2', '9b8721b0a8dfc691c5bc5885dbfcb27a', - '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', - 'ecb-tbl-256: I=122'), - ('404142436665647b29282f2eaba4a5a6', '0dc015ce9a3a3414b5e62ec643384183', - '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', - 'ecb-tbl-256: I=123'), - ('33323130e6e5e4eb23222524dea1a0a3', '705715448a8da412025ce38345c2a148', - 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', - 'ecb-tbl-256: I=124'), - ('cfcecdccf6f5f4cbe6e7e0e199969794', 'c32b5b0b6fbae165266c569f4b6ecf0b', - 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', - 'ecb-tbl-256: I=125'), - ('babbb8b97271707fdcdddadb29363734', '4dca6c75192a01ddca9476af2a521e87', - '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', - 'ecb-tbl-256: I=126'), - ('c9c8cbca4447465926272021545b5a59', '058691e627ecbc36ac07b6db423bd698', - '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', - 'ecb-tbl-256: I=127'), - ('050407067477767956575051221d1c1f', '7444527095838fe080fc2bcdd30847eb', - '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', - 'ecb-tbl-256: I=128'), - - # FIPS PUB 800-38A test vectors, 2001 edition. Annex F. 
- - ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', - '3ad77bb40d7a3660a89ecaf32466ef97'+'f5d3d58503b9699de785895a96fdbaaf'+ - '43b1cd7f598ece23881b00e3ed030688'+'7b0c785e27e8ad3f8223207104725dd4', - '2b7e151628aed2a6abf7158809cf4f3c', - 'NIST 800-38A, F.1.1, ECB and AES-128'), - - ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', - 'bd334f1d6e45f25ff712a214571fa5cc'+'974104846d0ad3ad7734ecb3ecee4eef'+ - 'ef7afd2270e2e60adce0ba2face6444e'+'9a4b41ba738d6c72fb16691603c18e0e', - '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', - 'NIST 800-38A, F.1.3, ECB and AES-192'), - - ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', - 'f3eed1bdb5d2a03c064b5a7e3db181f8'+'591ccb10d410ed26dc5ba74a31362870'+ - 'b6ed21b99ca6f4f9f153e7b1beafed1d'+'23304b7a39f9f3ff067d8d8f9e24ecc7', - '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', - 'NIST 800-38A, F.1.5, ECB and AES-256'), - - ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', - '7649abac8119b246cee98e9b12e9197d'+'5086cb9b507219ee95db113a917678b2'+ - '73bed6b8e3c1743b7116e69e22229516'+'3ff1caa1681fac09120eca307586e1a7', - '2b7e151628aed2a6abf7158809cf4f3c', - 'NIST 800-38A, F.2.1, CBC and AES-128', - dict(mode='CBC', iv='000102030405060708090a0b0c0d0e0f')), - - ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', - '4f021db243bc633d7178183a9fa071e8'+'b4d9ada9ad7dedf4e5e738763f69145a'+ - '571b242012fb7ae07fa9baac3df102e0'+'08b0e27988598881d920a9e64f5615cd', - '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', - 'NIST 800-38A, F.2.3, CBC and AES-192', - dict(mode='CBC', iv='000102030405060708090a0b0c0d0e0f')), - - ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', - 'f58c4c04d6e5f1ba779eabfb5f7bfbd6'+'9cfc4e967edb808d679f777bc6702c7d'+ - '39f23369a9d9bacfa530e26304231461'+'b2eb05e2c39be9fcda6c19078c6a9d1b', - '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', - 'NIST 800-38A, F.2.5, CBC and AES-256', - dict(mode='CBC', iv='000102030405060708090a0b0c0d0e0f')), - - # Skip CFB-1 since it is not supported by PyCrypto - - ('6bc1bee22e409f96e93d7e117393172aae2d','3b79424c9c0dd436bace9e0ed4586a4f32b9', - '2b7e151628aed2a6abf7158809cf4f3c', - 'NIST 800-38A, F.3.7, CFB-8 and AES-128', - dict(mode='CFB', iv='000102030405060708090a0b0c0d0e0f', segment_size=8)), - - ('6bc1bee22e409f96e93d7e117393172aae2d','cda2521ef0a905ca44cd057cbf0d47a0678a', - '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', - 'NIST 800-38A, F.3.9, CFB-8 and AES-192', - dict(mode='CFB', iv='000102030405060708090a0b0c0d0e0f', segment_size=8)), - - ('6bc1bee22e409f96e93d7e117393172aae2d','dc1f1a8520a64db55fcc8ac554844e889700', - '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', - 'NIST 800-38A, F.3.11, CFB-8 and AES-256', - dict(mode='CFB', iv='000102030405060708090a0b0c0d0e0f', segment_size=8)), - - ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', -
'3b3fd92eb72dad20333449f8e83cfb4a'+'c8a64537a0b3a93fcde3cdad9f1ce58b'+ - '26751f67a3cbb140b1808cf187a4f4df'+'c04b05357c5d1c0eeac4c66f9ff7f2e6', - '2b7e151628aed2a6abf7158809cf4f3c', - 'NIST 800-38A, F.3.13, CFB-128 and AES-128', - dict(mode='CFB', iv='000102030405060708090a0b0c0d0e0f', segment_size=128)), - - ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', - 'cdc80d6fddf18cab34c25909c99a4174'+'67ce7f7f81173621961a2b70171d3d7a'+ - '2e1e8a1dd59b88b1c8e60fed1efac4c9'+'c05f9f9ca9834fa042ae8fba584b09ff', - '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', - 'NIST 800-38A, F.3.15, CFB-128 and AES-192', - dict(mode='CFB', iv='000102030405060708090a0b0c0d0e0f', segment_size=128)), - - ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', - 'dc7e84bfda79164b7ecd8486985d3860'+'39ffed143b28b1c832113c6331e5407b'+ - 'df10132415e54b92a13ed0a8267ae2f9'+'75a385741ab9cef82031623d55b1e471', - '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', - 'NIST 800-38A, F.3.17, CFB-128 and AES-256', - dict(mode='CFB', iv='000102030405060708090a0b0c0d0e0f', segment_size=128)), - - ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', - '3b3fd92eb72dad20333449f8e83cfb4a'+'7789508d16918f03f53c52dac54ed825'+ - '9740051e9c5fecf64344f7a82260edcc'+'304c6528f659c77866a510d9c1d6ae5e', - '2b7e151628aed2a6abf7158809cf4f3c', - 'NIST 800-38A, F.4.1, OFB and AES-128', - dict(mode='OFB', iv='000102030405060708090a0b0c0d0e0f')), - - ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', - 'cdc80d6fddf18cab34c25909c99a4174'+'fcc28b8d4c63837c09e81700c1100401'+ - '8d9a9aeac0f6596f559c6d4daf59a5f2'+'6d9f200857ca6c3e9cac524bd9acc92a', - '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', - 'NIST 800-38A, F.4.3, OFB and AES-192', - dict(mode='OFB', iv='000102030405060708090a0b0c0d0e0f')), - - ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', - 'dc7e84bfda79164b7ecd8486985d3860'+'4febdc6740d20b3ac88f6ad82a4fb08d'+ - '71ab47a086e86eedf39d1c5bba97c408'+'0126141d67f37be8538f5a8be740e484', - '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', - 'NIST 800-38A, F.4.5, OFB and AES-256', - dict(mode='OFB', iv='000102030405060708090a0b0c0d0e0f')), - - ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', - '874d6191b620e3261bef6864990db6ce'+'9806f66b7970fdff8617187bb9fffdff'+ - '5ae4df3edbd5d35e5b4f09020db03eab'+'1e031dda2fbe03d1792170a0f3009cee', - '2b7e151628aed2a6abf7158809cf4f3c', - 'NIST 800-38A, F.5.1, CTR and AES-128', - dict(mode='CTR', ctr_params=dict(nbits=16, prefix='f0f1f2f3f4f5f6f7f8f9fafbfcfd', initial_value=0xfeff))), - - ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', - '1abc932417521ca24f2b0459fe7e6e0b'+'090339ec0aa6faefd5ccc2c6f4ce8e94'+ - '1e36b26bd1ebc670d1bd1d665620abf7'+'4f78a7f6d29809585a97daec58c6b050', - '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', - 'NIST 800-38A, F.5.3, CTR and AES-192', - dict(mode='CTR', ctr_params=dict(nbits=16, 
prefix='f0f1f2f3f4f5f6f7f8f9fafbfcfd', initial_value=0xfeff))), - - ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ - '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', - '601ec313775789a5b7a7f504bbf3d228'+'f443e3ca4d62b59aca84e990cacaf5c5'+ - '2b0930daa23de94ce87017ba2d84988d'+'dfc9c58db67aada613c2dd08457941a6', - '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', - 'NIST 800-38A, F.5.5, CTR and AES-256', - dict(mode='CTR', ctr_params=dict(nbits=16, prefix='f0f1f2f3f4f5f6f7f8f9fafbfcfd', initial_value=0xfeff))), - - # RFC 3686 test vectors - # This is a list of (plaintext, ciphertext, key[, description[, params]]) tuples. - ('53696e676c6520626c6f636b206d7367', 'e4095d4fb7a7b3792d6175a3261311b8', - 'ae6852f8121067cc4bf7a5765577f39e', - 'RFC 3686 Test Vector #1: Encrypting 16 octets using AES-CTR with 128-bit key', - dict(mode='CTR', ctr_params=dict(nbits=32, prefix='00000030'+'0000000000000000'))), - ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', - '5104a106168a72d9790d41ee8edad388eb2e1efc46da57c8fce630df9141be28', - '7e24067817fae0d743d6ce1f32539163', - 'RFC 3686 Test Vector #2: Encrypting 32 octets using AES-CTR with 128-bit key', - dict(mode='CTR', ctr_params=dict(nbits=32, prefix='006cb6db'+'c0543b59da48d90b'))), - ('000102030405060708090a0b0c0d0e0f'+'101112131415161718191a1b1c1d1e1f'+'20212223', - 'c1cf48a89f2ffdd9cf4652e9efdb72d7'+'4540a42bde6d7836d59a5ceaaef31053'+'25b2072f', - '7691be035e5020a8ac6e618529f9a0dc', - 'RFC 3686 Test Vector #3: Encrypting 36 octets using AES-CTR with 128-bit key', - dict(mode='CTR', ctr_params=dict(nbits=32, prefix='00e0017b'+'27777f3f4a1786f0'))), - ('53696e676c6520626c6f636b206d7367', - '4b55384fe259c9c84e7935a003cbe928', - '16af5b145fc9f579c175f93e3bfb0eed'+'863d06ccfdb78515', - 'RFC 3686 Test Vector #4: Encrypting 16 octets using AES-CTR with 192-bit key', - dict(mode='CTR', ctr_params=dict(nbits=32, prefix='00000048'+'36733c147d6d93cb'))), - ('000102030405060708090a0b0c0d0e0f'+'101112131415161718191a1b1c1d1e1f', - '453243fc609b23327edfaafa7131cd9f'+'8490701c5ad4a79cfc1fe0ff42f4fb00', - '7c5cb2401b3dc33c19e7340819e0f69c'+'678c3db8e6f6a91a', - 'RFC 3686 Test Vector #5: Encrypting 32 octets using AES-CTR with 192-bit key', - dict(mode='CTR', ctr_params=dict(nbits=32, prefix='0096b03b'+'020c6eadc2cb500d'))), - ('000102030405060708090a0b0c0d0e0f'+'101112131415161718191a1b1c1d1e1f'+'20212223', - '96893fc55e5c722f540b7dd1ddf7e758'+'d288bc95c69165884536c811662f2188'+'abee0935', - '02bf391ee8ecb159b959617b0965279b'+'f59b60a786d3e0fe', - 'RFC 3686 Test Vector #6: Encrypting 36 octets using AES-CTR with 192-bit key', - dict(mode='CTR', ctr_params=dict(nbits=32, prefix='0007bdfd'+'5cbd60278dcc0912'))), - ('53696e676c6520626c6f636b206d7367', - '145ad01dbf824ec7560863dc71e3e0c0', - '776beff2851db06f4c8a0542c8696f6c'+'6a81af1eec96b4d37fc1d689e6c1c104', - 'RFC 3686 Test Vector #7: Encrypting 16 octets using AES-CTR with 256-bit key', - dict(mode='CTR', ctr_params=dict(nbits=32, prefix='00000060'+'db5672c97aa8f0b2'))), - ('000102030405060708090a0b0c0d0e0f'+'101112131415161718191a1b1c1d1e1f', - 'f05e231b3894612c49ee000b804eb2a9'+'b8306b508f839d6a5530831d9344af1c', - 'f6d66d6bd52d59bb0796365879eff886'+'c66dd51a5b6a99744b50590c87a23884', - 'RFC 3686 Test Vector #8: Encrypting 32 octets using AES-CTR with 256-bit key', - dict(mode='CTR', ctr_params=dict(nbits=32, prefix='00faac24'+'c1585ef15a43d875'))), - ('000102030405060708090a0b0c0d0e0f'+'101112131415161718191a1b1c1d1e1f'+'20212223', 
- 'eb6c52821d0bbbf7ce7594462aca4faa'+'b407df866569fd07f48cc0b583d6071f'+'1ec0e6b8', - 'ff7a617ce69148e4f1726e2f43581de2'+'aa62d9f805532edff1eed687fb54153d', - 'RFC 3686 Test Vector #9: Encrypting 36 octets using AES-CTR with 256-bit key', - dict(mode='CTR', ctr_params=dict(nbits=32, prefix='001cc5b7'+'51a51d70a1c11148'))), - - # The following test vectors have been generated with gpg v1.4.0. - # The command line used was: - # - # gpg -c -z 0 --cipher-algo AES --passphrase secret_passphrase \ - # --disable-mdc --s2k-mode 0 --output ct pt - # - # As result, the content of the file 'pt' is encrypted with a key derived - # from 'secret_passphrase' and written to file 'ct'. - # Test vectors must be extracted from 'ct', which is a collection of - # TLVs (see RFC4880 for all details): - # - the encrypted data (with the encrypted IV as prefix) is the payload - # of the TLV with tag 9 (Symmetrical Encrypted Data Packet). - # This is the ciphertext in the test vector. - # - inside the encrypted part, there is a further layer of TLVs. One must - # look for tag 11 (Literal Data Packet); in its payload, after a short - # but time dependent header, there is the content of file 'pt'. - # In the test vector, the plaintext is the complete set of TLVs that gets - # encrypted. It is not just the content of 'pt'. - # - the key is the leftmost 16 bytes of the SHA1 digest of the password. - # The test vector contains such shortened digest. - # - # Note that encryption uses a clear IV, and decryption an encrypted IV - ( 'ac18620270744fb4f647426c61636b4361745768697465436174', # Plaintext, 'BlackCatWhiteCat' - 'dc6b9e1f095de609765c59983db5956ae4f63aea7405389d2ebb', # Ciphertext - '5baa61e4c9b93f3f0682250b6cf8331b', # Key (hash of 'password') - 'GPG Test Vector #1', - dict(mode='OPENPGP', iv='3d7d3e62282add7eb203eeba5c800733', encrypted_iv='fd934601ef49cb58b6d9aebca6056bdb96ef' ) ), -] - -def get_tests(config={}): - from Crypto.Cipher import AES - from .common import make_block_tests - return make_block_tests(AES, "AES", test_data) - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_ARC2.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_ARC2.py deleted file mode 100644 index eadcca4..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_ARC2.py +++ /dev/null @@ -1,124 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/ARC2.py: Self-test for the Alleged-RC2 cipher -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Cipher.ARC2""" - -__revision__ = "$Id$" - -from .common import dict # For compatibility with Python 2.1 and 2.2 - -import unittest -from Crypto.Util.py3compat import * - -# This is a list of (plaintext, ciphertext, key[, description[, extra_params]]) tuples. -test_data = [ - # Test vectors from RFC 2268 - - # 63-bit effective key length - ('0000000000000000', 'ebb773f993278eff', '0000000000000000', - 'RFC2268-1', dict(effective_keylen=63)), - - # 64-bit effective key length - ('ffffffffffffffff', '278b27e42e2f0d49', 'ffffffffffffffff', - 'RFC2268-2', dict(effective_keylen=64)), - ('1000000000000001', '30649edf9be7d2c2', '3000000000000000', - 'RFC2268-3', dict(effective_keylen=64)), - ('0000000000000000', '61a8a244adacccf0', '88', - 'RFC2268-4', dict(effective_keylen=64)), - ('0000000000000000', '6ccf4308974c267f', '88bca90e90875a', - 'RFC2268-5', dict(effective_keylen=64)), - ('0000000000000000', '1a807d272bbe5db1', '88bca90e90875a7f0f79c384627bafb2', - 'RFC2268-6', dict(effective_keylen=64)), - - # 128-bit effective key length - ('0000000000000000', '2269552ab0f85ca6', '88bca90e90875a7f0f79c384627bafb2', - "RFC2268-7", dict(effective_keylen=128)), - ('0000000000000000', '5b78d3a43dfff1f1', - '88bca90e90875a7f0f79c384627bafb216f80a6f85920584c42fceb0be255daf1e', - "RFC2268-8", dict(effective_keylen=129)), - - # Test vectors from PyCrypto 2.0.1's testdata.py - # 1024-bit effective key length - ('0000000000000000', '624fb3e887419e48', '5068696c6970476c617373', - 'PCTv201-0'), - ('ffffffffffffffff', '79cadef44c4a5a85', '5068696c6970476c617373', - 'PCTv201-1'), - ('0001020304050607', '90411525b34e4c2c', '5068696c6970476c617373', - 'PCTv201-2'), - ('0011223344556677', '078656aaba61cbfb', '5068696c6970476c617373', - 'PCTv201-3'), - ('0000000000000000', 'd7bcc5dbb4d6e56a', 'ffffffffffffffff', - 'PCTv201-4'), - ('ffffffffffffffff', '7259018ec557b357', 'ffffffffffffffff', - 'PCTv201-5'), - ('0001020304050607', '93d20a497f2ccb62', 'ffffffffffffffff', - 'PCTv201-6'), - ('0011223344556677', 'cb15a7f819c0014d', 'ffffffffffffffff', - 'PCTv201-7'), - ('0000000000000000', '63ac98cdf3843a7a', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', - 'PCTv201-8'), - ('ffffffffffffffff', '3fb49e2fa12371dd', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', - 'PCTv201-9'), - ('0001020304050607', '46414781ab387d5f', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', - 'PCTv201-10'), - ('0011223344556677', 'be09dc81feaca271', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', - 'PCTv201-11'), - ('0000000000000000', 'e64221e608be30ab', '53e5ffe553', - 'PCTv201-12'), - ('ffffffffffffffff', '862bc60fdcd4d9a9', '53e5ffe553', - 'PCTv201-13'), - ('0001020304050607', '6a34da50fa5e47de', '53e5ffe553', - 'PCTv201-14'), - ('0011223344556677', '584644c34503122c', '53e5ffe553', - 'PCTv201-15'), -] - -class BufferOverflowTest(unittest.TestCase): - # Test a buffer overflow found in older versions of PyCrypto - - def setUp(self): - global ARC2 - from Crypto.Cipher import ARC2 - - def runTest(self): - """ARC2 with keylength > 128""" - key = "x" * 16384 - mode = ARC2.MODE_ECB - self.assertRaises(ValueError, ARC2.new, key, mode) - -def 
get_tests(config={}): - from Crypto.Cipher import ARC2 - from .common import make_block_tests - - tests = make_block_tests(ARC2, "ARC2", test_data) - tests.append(BufferOverflowTest()) - - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_ARC4.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_ARC4.py deleted file mode 100644 index 2ab400e..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_ARC4.py +++ /dev/null @@ -1,81 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/ARC4.py: Self-test for the Alleged-RC4 cipher -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Cipher.ARC4""" - -__revision__ = "$Id$" - -from Crypto.Util.py3compat import * - -# This is a list of (plaintext, ciphertext, key[, description]) tuples. -test_data = [ - # Test vectors from Eric Rescorla's message with the subject - # "RC4 compatibility testing", sent to the cipherpunks mailing list on - # September 13, 1994. 
- # http://cypherpunks.venona.com/date/1994/09/msg00420.html - - ('0123456789abcdef', '75b7878099e0c596', '0123456789abcdef', - 'Test vector 0'), - - ('0000000000000000', '7494c2e7104b0879', '0123456789abcdef', - 'Test vector 1'), - - ('0000000000000000', 'de188941a3375d3a', '0000000000000000', - 'Test vector 2'), - - ('00000000000000000000', 'd6a141a7ec3c38dfbd61', 'ef012345', - 'Test vector 3'), - - ('01' * 512, - '7595c3e6114a09780c4ad452338e1ffd9a1be9498f813d76533449b6778dcad8' - + 'c78a8d2ba9ac66085d0e53d59c26c2d1c490c1ebbe0ce66d1b6b1b13b6b919b8' - + '47c25a91447a95e75e4ef16779cde8bf0a95850e32af9689444fd377108f98fd' - + 'cbd4e726567500990bcc7e0ca3c4aaa304a387d20f3b8fbbcd42a1bd311d7a43' - + '03dda5ab078896ae80c18b0af66dff319616eb784e495ad2ce90d7f772a81747' - + 'b65f62093b1e0db9e5ba532fafec47508323e671327df9444432cb7367cec82f' - + '5d44c0d00b67d650a075cd4b70dedd77eb9b10231b6b5b741347396d62897421' - + 'd43df9b42e446e358e9c11a9b2184ecbef0cd8e7a877ef968f1390ec9b3d35a5' - + '585cb009290e2fcde7b5ec66d9084be44055a619d9dd7fc3166f9487f7cb2729' - + '12426445998514c15d53a18c864ce3a2b7555793988126520eacf2e3066e230c' - + '91bee4dd5304f5fd0405b35bd99c73135d3d9bc335ee049ef69b3867bf2d7bd1' - + 'eaa595d8bfc0066ff8d31509eb0c6caa006c807a623ef84c3d33c195d23ee320' - + 'c40de0558157c822d4b8c569d849aed59d4e0fd7f379586b4b7ff684ed6a189f' - + '7486d49b9c4bad9ba24b96abf924372c8a8fffb10d55354900a77a3db5f205e1' - + 'b99fcd8660863a159ad4abe40fa48934163ddde542a6585540fd683cbfd8c00f' - + '12129a284deacc4cdefe58be7137541c047126c8d49e2755ab181ab7e940b0c0', - '0123456789abcdef', - "Test vector 4"), -] - -def get_tests(config={}): - from Crypto.Cipher import ARC4 - from .common import make_stream_tests - return make_stream_tests(ARC4, "ARC4", test_data) - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_Blowfish.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_Blowfish.py deleted file mode 100644 index f0e6592..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_Blowfish.py +++ /dev/null @@ -1,113 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/test_Blowfish.py: Self-test for the Blowfish cipher -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test suite for Crypto.Cipher.Blowfish""" - -__revision__ = "$Id$" - -from Crypto.Util.py3compat import * - -# This is a list of (plaintext, ciphertext, key) tuples. -test_data = [ - # Test vectors from http://www.schneier.com/code/vectors.txt - ('0000000000000000', '4ef997456198dd78', '0000000000000000'), - ('ffffffffffffffff', '51866fd5b85ecb8a', 'ffffffffffffffff'), - ('1000000000000001', '7d856f9a613063f2', '3000000000000000'), - ('1111111111111111', '2466dd878b963c9d', '1111111111111111'), - ('1111111111111111', '61f9c3802281b096', '0123456789abcdef'), - ('0123456789abcdef', '7d0cc630afda1ec7', '1111111111111111'), - ('0000000000000000', '4ef997456198dd78', '0000000000000000'), - ('0123456789abcdef', '0aceab0fc6a0a28d', 'fedcba9876543210'), - ('01a1d6d039776742', '59c68245eb05282b', '7ca110454a1a6e57'), - ('5cd54ca83def57da', 'b1b8cc0b250f09a0', '0131d9619dc1376e'), - ('0248d43806f67172', '1730e5778bea1da4', '07a1133e4a0b2686'), - ('51454b582ddf440a', 'a25e7856cf2651eb', '3849674c2602319e'), - ('42fd443059577fa2', '353882b109ce8f1a', '04b915ba43feb5b6'), - ('059b5e0851cf143a', '48f4d0884c379918', '0113b970fd34f2ce'), - ('0756d8e0774761d2', '432193b78951fc98', '0170f175468fb5e6'), - ('762514b829bf486a', '13f04154d69d1ae5', '43297fad38e373fe'), - ('3bdd119049372802', '2eedda93ffd39c79', '07a7137045da2a16'), - ('26955f6835af609a', 'd887e0393c2da6e3', '04689104c2fd3b2f'), - ('164d5e404f275232', '5f99d04f5b163969', '37d06bb516cb7546'), - ('6b056e18759f5cca', '4a057a3b24d3977b', '1f08260d1ac2465e'), - ('004bd6ef09176062', '452031c1e4fada8e', '584023641aba6176'), - ('480d39006ee762f2', '7555ae39f59b87bd', '025816164629b007'), - ('437540c8698f3cfa', '53c55f9cb49fc019', '49793ebc79b3258f'), - ('072d43a077075292', '7a8e7bfa937e89a3', '4fb05e1515ab73a7'), - ('02fe55778117f12a', 'cf9c5d7a4986adb5', '49e95d6d4ca229bf'), - ('1d9d5c5018f728c2', 'd1abb290658bc778', '018310dc409b26d6'), - ('305532286d6f295a', '55cb3774d13ef201', '1c587f1c13924fef'), - ('0123456789abcdef', 'fa34ec4847b268b2', '0101010101010101'), - ('0123456789abcdef', 'a790795108ea3cae', '1f1f1f1f0e0e0e0e'), - ('0123456789abcdef', 'c39e072d9fac631d', 'e0fee0fef1fef1fe'), - ('ffffffffffffffff', '014933e0cdaff6e4', '0000000000000000'), - ('0000000000000000', 'f21e9a77b71c49bc', 'ffffffffffffffff'), - ('0000000000000000', '245946885754369a', '0123456789abcdef'), - ('ffffffffffffffff', '6b5c5a9c5d9e0a5a', 'fedcba9876543210'), - ('fedcba9876543210', 'f9ad597c49db005e', 'f0'), - ('fedcba9876543210', 'e91d21c1d961a6d6', 'f0e1'), - ('fedcba9876543210', 'e9c2b70a1bc65cf3', 'f0e1d2'), - ('fedcba9876543210', 'be1e639408640f05', 'f0e1d2c3'), - ('fedcba9876543210', 'b39e44481bdb1e6e', 'f0e1d2c3b4'), - ('fedcba9876543210', '9457aa83b1928c0d', 'f0e1d2c3b4a5'), - ('fedcba9876543210', '8bb77032f960629d', 'f0e1d2c3b4a596'), - ('fedcba9876543210', 'e87a244e2cc85e82', 'f0e1d2c3b4a59687'), - ('fedcba9876543210', '15750e7a4f4ec577', 'f0e1d2c3b4a5968778'), - ('fedcba9876543210', '122ba70b3ab64ae0', 'f0e1d2c3b4a596877869'), - ('fedcba9876543210', '3a833c9affc537f6', 'f0e1d2c3b4a5968778695a'), - ('fedcba9876543210', '9409da87a90f6bf2', 'f0e1d2c3b4a5968778695a4b'), - ('fedcba9876543210', '884f80625060b8b4', 'f0e1d2c3b4a5968778695a4b3c'), - ('fedcba9876543210', '1f85031c19e11968', 'f0e1d2c3b4a5968778695a4b3c2d'), - ('fedcba9876543210', '79d9373a714ca34f', 'f0e1d2c3b4a5968778695a4b3c2d1e'), - ('fedcba9876543210', '93142887ee3be15c', - 'f0e1d2c3b4a5968778695a4b3c2d1e0f'), - 
('fedcba9876543210', '03429e838ce2d14b', - 'f0e1d2c3b4a5968778695a4b3c2d1e0f00'), - ('fedcba9876543210', 'a4299e27469ff67b', - 'f0e1d2c3b4a5968778695a4b3c2d1e0f0011'), - ('fedcba9876543210', 'afd5aed1c1bc96a8', - 'f0e1d2c3b4a5968778695a4b3c2d1e0f001122'), - ('fedcba9876543210', '10851c0e3858da9f', - 'f0e1d2c3b4a5968778695a4b3c2d1e0f00112233'), - ('fedcba9876543210', 'e6f51ed79b9db21f', - 'f0e1d2c3b4a5968778695a4b3c2d1e0f0011223344'), - ('fedcba9876543210', '64a6e14afd36b46f', - 'f0e1d2c3b4a5968778695a4b3c2d1e0f001122334455'), - ('fedcba9876543210', '80c7d7d45a5479ad', - 'f0e1d2c3b4a5968778695a4b3c2d1e0f00112233445566'), - ('fedcba9876543210', '05044b62fa52d080', - 'f0e1d2c3b4a5968778695a4b3c2d1e0f0011223344556677'), -] - -def get_tests(config={}): - from Crypto.Cipher import Blowfish - from .common import make_block_tests - return make_block_tests(Blowfish, "Blowfish", test_data) - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_CAST.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_CAST.py deleted file mode 100644 index fee5c99..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_CAST.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/CAST.py: Self-test for the CAST-128 (CAST5) cipher -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Cipher.CAST""" - -__revision__ = "$Id$" - -from Crypto.Util.py3compat import * - -# This is a list of (plaintext, ciphertext, key) tuples. 
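# --- Editor's illustrative sketch; not part of the original test module. ---
# It shows how one of the (plaintext, ciphertext, key) hex triples below can be
# checked by hand with the same Crypto.Cipher.CAST interface that get_tests()
# exercises via make_block_tests(); the vector is the RFC 2144 128-bit-key entry,
# and the local name `_cast` is an editor-invented placeholder.
from binascii import hexlify, unhexlify
from Crypto.Cipher import CAST

_cast = CAST.new(unhexlify('0123456712345678234567893456789a'), CAST.MODE_ECB)
assert hexlify(_cast.encrypt(unhexlify('0123456789abcdef'))) == b'238b4fe5847e44b2'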
-test_data = [ - # Test vectors from RFC 2144, B.1 - ('0123456789abcdef', '238b4fe5847e44b2', - '0123456712345678234567893456789a', - '128-bit key'), - - ('0123456789abcdef', 'eb6a711a2c02271b', - '01234567123456782345', - '80-bit key'), - - ('0123456789abcdef', '7ac816d16e9b302e', - '0123456712', - '40-bit key'), -] - -def get_tests(config={}): - from Crypto.Cipher import CAST - from .common import make_block_tests - return make_block_tests(CAST, "CAST", test_data) - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_DES.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_DES.py deleted file mode 100644 index f31678f..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_DES.py +++ /dev/null @@ -1,339 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/DES.py: Self-test for the (Single) DES cipher -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Cipher.DES""" - -__revision__ = "$Id$" - -from .common import dict # For compatibility with Python 2.1 and 2.2 -from Crypto.Util.py3compat import * -import unittest - -# This is a list of (plaintext, ciphertext, key, description) tuples. 
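# --- Editor's illustrative note; not part of the original test module. ---
# The 64 plaintexts in the SP800-17 B.1 "Variable Plaintext" table below are the
# single-bit 64-bit patterns: entry #i has only bit (63 - i) set. The name
# SP800_17_B1_PT_SKETCH is an editor-invented placeholder showing how that
# column could be generated rather than listed.
SP800_17_B1_PT_SKETCH = ['%016x' % (1 << (63 - i)) for i in range(64)]
assert SP800_17_B1_PT_SKETCH[0] == '8000000000000000'    # B.1 #0 below
assert SP800_17_B1_PT_SKETCH[63] == '0000000000000001'   # B.1 #63 below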
-SP800_17_B1_KEY = '01' * 8 -SP800_17_B2_PT = '00' * 8 -test_data = [ - # Test vectors from Appendix A of NIST SP 800-17 - # "Modes of Operation Validation System (MOVS): Requirements and Procedures" - # http://csrc.nist.gov/publications/nistpubs/800-17/800-17.pdf - - # Appendix A - "Sample Round Outputs for the DES" - ('0000000000000000', '82dcbafbdeab6602', '10316e028c8f3b4a', - "NIST SP800-17 A"), - - # Table B.1 - Variable Plaintext Known Answer Test - ('8000000000000000', '95f8a5e5dd31d900', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #0'), - ('4000000000000000', 'dd7f121ca5015619', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #1'), - ('2000000000000000', '2e8653104f3834ea', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #2'), - ('1000000000000000', '4bd388ff6cd81d4f', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #3'), - ('0800000000000000', '20b9e767b2fb1456', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #4'), - ('0400000000000000', '55579380d77138ef', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #5'), - ('0200000000000000', '6cc5defaaf04512f', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #6'), - ('0100000000000000', '0d9f279ba5d87260', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #7'), - ('0080000000000000', 'd9031b0271bd5a0a', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #8'), - ('0040000000000000', '424250b37c3dd951', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #9'), - ('0020000000000000', 'b8061b7ecd9a21e5', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #10'), - ('0010000000000000', 'f15d0f286b65bd28', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #11'), - ('0008000000000000', 'add0cc8d6e5deba1', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #12'), - ('0004000000000000', 'e6d5f82752ad63d1', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #13'), - ('0002000000000000', 'ecbfe3bd3f591a5e', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #14'), - ('0001000000000000', 'f356834379d165cd', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #15'), - ('0000800000000000', '2b9f982f20037fa9', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #16'), - ('0000400000000000', '889de068a16f0be6', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #17'), - ('0000200000000000', 'e19e275d846a1298', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #18'), - ('0000100000000000', '329a8ed523d71aec', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #19'), - ('0000080000000000', 'e7fce22557d23c97', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #20'), - ('0000040000000000', '12a9f5817ff2d65d', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #21'), - ('0000020000000000', 'a484c3ad38dc9c19', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #22'), - ('0000010000000000', 'fbe00a8a1ef8ad72', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #23'), - ('0000008000000000', '750d079407521363', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #24'), - ('0000004000000000', '64feed9c724c2faf', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #25'), - ('0000002000000000', 'f02b263b328e2b60', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #26'), - ('0000001000000000', '9d64555a9a10b852', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #27'), - ('0000000800000000', 'd106ff0bed5255d7', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #28'), - ('0000000400000000', 'e1652c6b138c64a5', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #29'), - ('0000000200000000', 'e428581186ec8f46', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #30'), - ('0000000100000000', 'aeb5f5ede22d1a36', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #31'), - ('0000000080000000', 'e943d7568aec0c5c', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #32'), - ('0000000040000000', 'df98c8276f54b04b', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #33'), - ('0000000020000000', 'b160e4680f6c696f', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #34'), - ('0000000010000000', 
'fa0752b07d9c4ab8', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #35'), - ('0000000008000000', 'ca3a2b036dbc8502', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #36'), - ('0000000004000000', '5e0905517bb59bcf', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #37'), - ('0000000002000000', '814eeb3b91d90726', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #38'), - ('0000000001000000', '4d49db1532919c9f', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #39'), - ('0000000000800000', '25eb5fc3f8cf0621', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #40'), - ('0000000000400000', 'ab6a20c0620d1c6f', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #41'), - ('0000000000200000', '79e90dbc98f92cca', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #42'), - ('0000000000100000', '866ecedd8072bb0e', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #43'), - ('0000000000080000', '8b54536f2f3e64a8', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #44'), - ('0000000000040000', 'ea51d3975595b86b', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #45'), - ('0000000000020000', 'caffc6ac4542de31', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #46'), - ('0000000000010000', '8dd45a2ddf90796c', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #47'), - ('0000000000008000', '1029d55e880ec2d0', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #48'), - ('0000000000004000', '5d86cb23639dbea9', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #49'), - ('0000000000002000', '1d1ca853ae7c0c5f', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #50'), - ('0000000000001000', 'ce332329248f3228', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #51'), - ('0000000000000800', '8405d1abe24fb942', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #52'), - ('0000000000000400', 'e643d78090ca4207', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #53'), - ('0000000000000200', '48221b9937748a23', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #54'), - ('0000000000000100', 'dd7c0bbd61fafd54', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #55'), - ('0000000000000080', '2fbc291a570db5c4', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #56'), - ('0000000000000040', 'e07c30d7e4e26e12', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #57'), - ('0000000000000020', '0953e2258e8e90a1', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #58'), - ('0000000000000010', '5b711bc4ceebf2ee', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #59'), - ('0000000000000008', 'cc083f1e6d9e85f6', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #60'), - ('0000000000000004', 'd2fd8867d50d2dfe', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #61'), - ('0000000000000002', '06e7ea22ce92708f', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #62'), - ('0000000000000001', '166b40b44aba4bd6', SP800_17_B1_KEY, - 'NIST SP800-17 B.1 #63'), - - # Table B.2 - Variable Key Known Answer Test - (SP800_17_B2_PT, '95a8d72813daa94d', '8001010101010101', - 'NIST SP800-17 B.2 #0'), - (SP800_17_B2_PT, '0eec1487dd8c26d5', '4001010101010101', - 'NIST SP800-17 B.2 #1'), - (SP800_17_B2_PT, '7ad16ffb79c45926', '2001010101010101', - 'NIST SP800-17 B.2 #2'), - (SP800_17_B2_PT, 'd3746294ca6a6cf3', '1001010101010101', - 'NIST SP800-17 B.2 #3'), - (SP800_17_B2_PT, '809f5f873c1fd761', '0801010101010101', - 'NIST SP800-17 B.2 #4'), - (SP800_17_B2_PT, 'c02faffec989d1fc', '0401010101010101', - 'NIST SP800-17 B.2 #5'), - (SP800_17_B2_PT, '4615aa1d33e72f10', '0201010101010101', - 'NIST SP800-17 B.2 #6'), - (SP800_17_B2_PT, '2055123350c00858', '0180010101010101', - 'NIST SP800-17 B.2 #7'), - (SP800_17_B2_PT, 'df3b99d6577397c8', '0140010101010101', - 'NIST SP800-17 B.2 #8'), - (SP800_17_B2_PT, '31fe17369b5288c9', '0120010101010101', - 'NIST SP800-17 B.2 #9'), - (SP800_17_B2_PT, 'dfdd3cc64dae1642', '0110010101010101', - 'NIST SP800-17 B.2 #10'), - (SP800_17_B2_PT, 
'178c83ce2b399d94', '0108010101010101', - 'NIST SP800-17 B.2 #11'), - (SP800_17_B2_PT, '50f636324a9b7f80', '0104010101010101', - 'NIST SP800-17 B.2 #12'), - (SP800_17_B2_PT, 'a8468ee3bc18f06d', '0102010101010101', - 'NIST SP800-17 B.2 #13'), - (SP800_17_B2_PT, 'a2dc9e92fd3cde92', '0101800101010101', - 'NIST SP800-17 B.2 #14'), - (SP800_17_B2_PT, 'cac09f797d031287', '0101400101010101', - 'NIST SP800-17 B.2 #15'), - (SP800_17_B2_PT, '90ba680b22aeb525', '0101200101010101', - 'NIST SP800-17 B.2 #16'), - (SP800_17_B2_PT, 'ce7a24f350e280b6', '0101100101010101', - 'NIST SP800-17 B.2 #17'), - (SP800_17_B2_PT, '882bff0aa01a0b87', '0101080101010101', - 'NIST SP800-17 B.2 #18'), - (SP800_17_B2_PT, '25610288924511c2', '0101040101010101', - 'NIST SP800-17 B.2 #19'), - (SP800_17_B2_PT, 'c71516c29c75d170', '0101020101010101', - 'NIST SP800-17 B.2 #20'), - (SP800_17_B2_PT, '5199c29a52c9f059', '0101018001010101', - 'NIST SP800-17 B.2 #21'), - (SP800_17_B2_PT, 'c22f0a294a71f29f', '0101014001010101', - 'NIST SP800-17 B.2 #22'), - (SP800_17_B2_PT, 'ee371483714c02ea', '0101012001010101', - 'NIST SP800-17 B.2 #23'), - (SP800_17_B2_PT, 'a81fbd448f9e522f', '0101011001010101', - 'NIST SP800-17 B.2 #24'), - (SP800_17_B2_PT, '4f644c92e192dfed', '0101010801010101', - 'NIST SP800-17 B.2 #25'), - (SP800_17_B2_PT, '1afa9a66a6df92ae', '0101010401010101', - 'NIST SP800-17 B.2 #26'), - (SP800_17_B2_PT, 'b3c1cc715cb879d8', '0101010201010101', - 'NIST SP800-17 B.2 #27'), - (SP800_17_B2_PT, '19d032e64ab0bd8b', '0101010180010101', - 'NIST SP800-17 B.2 #28'), - (SP800_17_B2_PT, '3cfaa7a7dc8720dc', '0101010140010101', - 'NIST SP800-17 B.2 #29'), - (SP800_17_B2_PT, 'b7265f7f447ac6f3', '0101010120010101', - 'NIST SP800-17 B.2 #30'), - (SP800_17_B2_PT, '9db73b3c0d163f54', '0101010110010101', - 'NIST SP800-17 B.2 #31'), - (SP800_17_B2_PT, '8181b65babf4a975', '0101010108010101', - 'NIST SP800-17 B.2 #32'), - (SP800_17_B2_PT, '93c9b64042eaa240', '0101010104010101', - 'NIST SP800-17 B.2 #33'), - (SP800_17_B2_PT, '5570530829705592', '0101010102010101', - 'NIST SP800-17 B.2 #34'), - (SP800_17_B2_PT, '8638809e878787a0', '0101010101800101', - 'NIST SP800-17 B.2 #35'), - (SP800_17_B2_PT, '41b9a79af79ac208', '0101010101400101', - 'NIST SP800-17 B.2 #36'), - (SP800_17_B2_PT, '7a9be42f2009a892', '0101010101200101', - 'NIST SP800-17 B.2 #37'), - (SP800_17_B2_PT, '29038d56ba6d2745', '0101010101100101', - 'NIST SP800-17 B.2 #38'), - (SP800_17_B2_PT, '5495c6abf1e5df51', '0101010101080101', - 'NIST SP800-17 B.2 #39'), - (SP800_17_B2_PT, 'ae13dbd561488933', '0101010101040101', - 'NIST SP800-17 B.2 #40'), - (SP800_17_B2_PT, '024d1ffa8904e389', '0101010101020101', - 'NIST SP800-17 B.2 #41'), - (SP800_17_B2_PT, 'd1399712f99bf02e', '0101010101018001', - 'NIST SP800-17 B.2 #42'), - (SP800_17_B2_PT, '14c1d7c1cffec79e', '0101010101014001', - 'NIST SP800-17 B.2 #43'), - (SP800_17_B2_PT, '1de5279dae3bed6f', '0101010101012001', - 'NIST SP800-17 B.2 #44'), - (SP800_17_B2_PT, 'e941a33f85501303', '0101010101011001', - 'NIST SP800-17 B.2 #45'), - (SP800_17_B2_PT, 'da99dbbc9a03f379', '0101010101010801', - 'NIST SP800-17 B.2 #46'), - (SP800_17_B2_PT, 'b7fc92f91d8e92e9', '0101010101010401', - 'NIST SP800-17 B.2 #47'), - (SP800_17_B2_PT, 'ae8e5caa3ca04e85', '0101010101010201', - 'NIST SP800-17 B.2 #48'), - (SP800_17_B2_PT, '9cc62df43b6eed74', '0101010101010180', - 'NIST SP800-17 B.2 #49'), - (SP800_17_B2_PT, 'd863dbb5c59a91a0', '0101010101010140', - 'NIST SP800-17 B.2 #50'), - (SP800_17_B2_PT, 'a1ab2190545b91d7', '0101010101010120', - 'NIST SP800-17 B.2 #51'), - 
(SP800_17_B2_PT, '0875041e64c570f7', '0101010101010110', - 'NIST SP800-17 B.2 #52'), - (SP800_17_B2_PT, '5a594528bebef1cc', '0101010101010108', - 'NIST SP800-17 B.2 #53'), - (SP800_17_B2_PT, 'fcdb3291de21f0c0', '0101010101010104', - 'NIST SP800-17 B.2 #54'), - (SP800_17_B2_PT, '869efd7f9f265a09', '0101010101010102', - 'NIST SP800-17 B.2 #55'), -] - -class RonRivestTest(unittest.TestCase): - """ Ronald L. Rivest's DES test, see - http://people.csail.mit.edu/rivest/Destest.txt - ABSTRACT - -------- - - We present a simple way to test the correctness of a DES implementation: - Use the recurrence relation: - - X0 = 9474B8E8C73BCA7D (hexadecimal) - - X(i+1) = IF (i is even) THEN E(Xi,Xi) ELSE D(Xi,Xi) - - to compute a sequence of 64-bit values: X0, X1, X2, ..., X16. Here - E(X,K) denotes the DES encryption of X using key K, and D(X,K) denotes - the DES decryption of X using key K. If you obtain - - X16 = 1B1A2DDB4C642438 - - your implementation does not have any of the 36,568 possible single-fault - errors described herein. - """ - def runTest(self): - from Crypto.Cipher import DES - from binascii import b2a_hex - - X = [] - X[0:] = [b('\x94\x74\xB8\xE8\xC7\x3B\xCA\x7D')] - - for i in range(16): - c = DES.new(X[i],DES.MODE_ECB) - if not (i&1): # (num&1) returns 1 for odd numbers - X[i+1:] = [c.encrypt(X[i])] # even - else: - X[i+1:] = [c.decrypt(X[i])] # odd - - self.assertEqual(b2a_hex(X[16]), - b2a_hex(b('\x1B\x1A\x2D\xDB\x4C\x64\x24\x38'))) - -def get_tests(config={}): - from Crypto.Cipher import DES - from .common import make_block_tests - return make_block_tests(DES, "DES", test_data) + [RonRivestTest()] - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_DES3.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_DES3.py deleted file mode 100644 index 83d1f47..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_DES3.py +++ /dev/null @@ -1,333 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/DES3.py: Self-test for the Triple-DES cipher -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Cipher.DES3""" - -__revision__ = "$Id$" - -from .common import dict # For compatibility with Python 2.1 and 2.2 -from Crypto.Util.py3compat import * -from binascii import hexlify - -# This is a list of (plaintext, ciphertext, key, description) tuples. 
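# --- Editor's illustrative sketch; not part of the original test module. ---
# The SP800-20 tables below repeat each 8-byte single-DES key three times, so
# K1 == K2 == K3; with equal keys TDEA's encrypt-decrypt-encrypt reduces to a
# single DES encryption, which is why the expected ciphertexts match the
# single-DES SP800-17 tables in test_DES.py. A minimal check of that identity,
# reusing the Crypto.Cipher.DES module from this suite (underscore-prefixed
# names are editor-invented placeholders):
from binascii import unhexlify as _unhex
from Crypto.Cipher import DES as _DES

_des = _DES.new(_unhex('8001010101010101'), _DES.MODE_ECB)
_block = b'\x00' * 8
assert _des.encrypt(_des.decrypt(_des.encrypt(_block))) == _des.encrypt(_block)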
-SP800_20_A1_KEY = '01' * 24 -SP800_20_A2_PT = '00' * 8 -test_data = [ - # Test vector from Appendix B of NIST SP 800-67 - # "Recommendation for the Triple Data Encryption Algorithm (TDEA) Block - # Cipher" - # http://csrc.nist.gov/publications/nistpubs/800-67/SP800-67.pdf - ('54686520717566636b2062726f776e20666f78206a756d70', - 'a826fd8ce53b855fcce21c8112256fe668d5c05dd9b6b900', - '0123456789abcdef23456789abcdef01456789abcdef0123', - 'NIST SP800-67 B.1'), - - # Test vectors "The Multi-block Message Test (MMT) for DES and TDES" - # http://csrc.nist.gov/groups/STM/cavp/documents/des/DESMMT.pdf - ('326a494cd33fe756', 'b22b8d66de970692', - '627f460e08104a1043cd265d5840eaf1313edf97df2a8a8c', - 'DESMMT #1', dict(mode='CBC', iv='8e29f75ea77e5475')), - - ('84401f78fe6c10876d8ea23094ea5309', '7b1f7c7e3b1c948ebd04a75ffba7d2f5', - '37ae5ebf46dff2dc0754b94f31cbb3855e7fd36dc870bfae', - 'DESMMT #2', dict(mode='CBC', iv='3d1de3cc132e3b65')), - - # Test vectors from Appendix A of NIST SP 800-20 - # "Modes of Operation Validation System for the Triple Data Encryption - # Algorithm (TMOVS): Requirements and Procedures" - # http://csrc.nist.gov/publications/nistpubs/800-20/800-20.pdf - - # Table A.1 - Variable Plaintext Known Answer Test - ('8000000000000000', '95f8a5e5dd31d900', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #0'), - ('4000000000000000', 'dd7f121ca5015619', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #1'), - ('2000000000000000', '2e8653104f3834ea', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #2'), - ('1000000000000000', '4bd388ff6cd81d4f', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #3'), - ('0800000000000000', '20b9e767b2fb1456', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #4'), - ('0400000000000000', '55579380d77138ef', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #5'), - ('0200000000000000', '6cc5defaaf04512f', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #6'), - ('0100000000000000', '0d9f279ba5d87260', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #7'), - ('0080000000000000', 'd9031b0271bd5a0a', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #8'), - ('0040000000000000', '424250b37c3dd951', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #9'), - ('0020000000000000', 'b8061b7ecd9a21e5', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #10'), - ('0010000000000000', 'f15d0f286b65bd28', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #11'), - ('0008000000000000', 'add0cc8d6e5deba1', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #12'), - ('0004000000000000', 'e6d5f82752ad63d1', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #13'), - ('0002000000000000', 'ecbfe3bd3f591a5e', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #14'), - ('0001000000000000', 'f356834379d165cd', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #15'), - ('0000800000000000', '2b9f982f20037fa9', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #16'), - ('0000400000000000', '889de068a16f0be6', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #17'), - ('0000200000000000', 'e19e275d846a1298', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #18'), - ('0000100000000000', '329a8ed523d71aec', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #19'), - ('0000080000000000', 'e7fce22557d23c97', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #20'), - ('0000040000000000', '12a9f5817ff2d65d', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #21'), - ('0000020000000000', 'a484c3ad38dc9c19', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #22'), - ('0000010000000000', 'fbe00a8a1ef8ad72', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #23'), - ('0000008000000000', '750d079407521363', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #24'), - ('0000004000000000', '64feed9c724c2faf', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #25'), - ('0000002000000000', 
'f02b263b328e2b60', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #26'), - ('0000001000000000', '9d64555a9a10b852', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #27'), - ('0000000800000000', 'd106ff0bed5255d7', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #28'), - ('0000000400000000', 'e1652c6b138c64a5', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #29'), - ('0000000200000000', 'e428581186ec8f46', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #30'), - ('0000000100000000', 'aeb5f5ede22d1a36', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #31'), - ('0000000080000000', 'e943d7568aec0c5c', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #32'), - ('0000000040000000', 'df98c8276f54b04b', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #33'), - ('0000000020000000', 'b160e4680f6c696f', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #34'), - ('0000000010000000', 'fa0752b07d9c4ab8', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #35'), - ('0000000008000000', 'ca3a2b036dbc8502', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #36'), - ('0000000004000000', '5e0905517bb59bcf', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #37'), - ('0000000002000000', '814eeb3b91d90726', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #38'), - ('0000000001000000', '4d49db1532919c9f', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #39'), - ('0000000000800000', '25eb5fc3f8cf0621', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #40'), - ('0000000000400000', 'ab6a20c0620d1c6f', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #41'), - ('0000000000200000', '79e90dbc98f92cca', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #42'), - ('0000000000100000', '866ecedd8072bb0e', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #43'), - ('0000000000080000', '8b54536f2f3e64a8', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #44'), - ('0000000000040000', 'ea51d3975595b86b', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #45'), - ('0000000000020000', 'caffc6ac4542de31', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #46'), - ('0000000000010000', '8dd45a2ddf90796c', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #47'), - ('0000000000008000', '1029d55e880ec2d0', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #48'), - ('0000000000004000', '5d86cb23639dbea9', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #49'), - ('0000000000002000', '1d1ca853ae7c0c5f', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #50'), - ('0000000000001000', 'ce332329248f3228', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #51'), - ('0000000000000800', '8405d1abe24fb942', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #52'), - ('0000000000000400', 'e643d78090ca4207', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #53'), - ('0000000000000200', '48221b9937748a23', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #54'), - ('0000000000000100', 'dd7c0bbd61fafd54', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #55'), - ('0000000000000080', '2fbc291a570db5c4', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #56'), - ('0000000000000040', 'e07c30d7e4e26e12', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #57'), - ('0000000000000020', '0953e2258e8e90a1', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #58'), - ('0000000000000010', '5b711bc4ceebf2ee', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #59'), - ('0000000000000008', 'cc083f1e6d9e85f6', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #60'), - ('0000000000000004', 'd2fd8867d50d2dfe', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #61'), - ('0000000000000002', '06e7ea22ce92708f', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #62'), - ('0000000000000001', '166b40b44aba4bd6', SP800_20_A1_KEY, - 'NIST SP800-20 A.1 #63'), - - # Table A.2 - Variable Key Known Answer Test - (SP800_20_A2_PT, '95a8d72813daa94d', '8001010101010101'*3, - 'NIST SP800-20 A.2 #0'), - (SP800_20_A2_PT, '0eec1487dd8c26d5', '4001010101010101'*3, - 'NIST SP800-20 A.2 #1'), - 
(SP800_20_A2_PT, '7ad16ffb79c45926', '2001010101010101'*3, - 'NIST SP800-20 A.2 #2'), - (SP800_20_A2_PT, 'd3746294ca6a6cf3', '1001010101010101'*3, - 'NIST SP800-20 A.2 #3'), - (SP800_20_A2_PT, '809f5f873c1fd761', '0801010101010101'*3, - 'NIST SP800-20 A.2 #4'), - (SP800_20_A2_PT, 'c02faffec989d1fc', '0401010101010101'*3, - 'NIST SP800-20 A.2 #5'), - (SP800_20_A2_PT, '4615aa1d33e72f10', '0201010101010101'*3, - 'NIST SP800-20 A.2 #6'), - (SP800_20_A2_PT, '2055123350c00858', '0180010101010101'*3, - 'NIST SP800-20 A.2 #7'), - (SP800_20_A2_PT, 'df3b99d6577397c8', '0140010101010101'*3, - 'NIST SP800-20 A.2 #8'), - (SP800_20_A2_PT, '31fe17369b5288c9', '0120010101010101'*3, - 'NIST SP800-20 A.2 #9'), - (SP800_20_A2_PT, 'dfdd3cc64dae1642', '0110010101010101'*3, - 'NIST SP800-20 A.2 #10'), - (SP800_20_A2_PT, '178c83ce2b399d94', '0108010101010101'*3, - 'NIST SP800-20 A.2 #11'), - (SP800_20_A2_PT, '50f636324a9b7f80', '0104010101010101'*3, - 'NIST SP800-20 A.2 #12'), - (SP800_20_A2_PT, 'a8468ee3bc18f06d', '0102010101010101'*3, - 'NIST SP800-20 A.2 #13'), - (SP800_20_A2_PT, 'a2dc9e92fd3cde92', '0101800101010101'*3, - 'NIST SP800-20 A.2 #14'), - (SP800_20_A2_PT, 'cac09f797d031287', '0101400101010101'*3, - 'NIST SP800-20 A.2 #15'), - (SP800_20_A2_PT, '90ba680b22aeb525', '0101200101010101'*3, - 'NIST SP800-20 A.2 #16'), - (SP800_20_A2_PT, 'ce7a24f350e280b6', '0101100101010101'*3, - 'NIST SP800-20 A.2 #17'), - (SP800_20_A2_PT, '882bff0aa01a0b87', '0101080101010101'*3, - 'NIST SP800-20 A.2 #18'), - (SP800_20_A2_PT, '25610288924511c2', '0101040101010101'*3, - 'NIST SP800-20 A.2 #19'), - (SP800_20_A2_PT, 'c71516c29c75d170', '0101020101010101'*3, - 'NIST SP800-20 A.2 #20'), - (SP800_20_A2_PT, '5199c29a52c9f059', '0101018001010101'*3, - 'NIST SP800-20 A.2 #21'), - (SP800_20_A2_PT, 'c22f0a294a71f29f', '0101014001010101'*3, - 'NIST SP800-20 A.2 #22'), - (SP800_20_A2_PT, 'ee371483714c02ea', '0101012001010101'*3, - 'NIST SP800-20 A.2 #23'), - (SP800_20_A2_PT, 'a81fbd448f9e522f', '0101011001010101'*3, - 'NIST SP800-20 A.2 #24'), - (SP800_20_A2_PT, '4f644c92e192dfed', '0101010801010101'*3, - 'NIST SP800-20 A.2 #25'), - (SP800_20_A2_PT, '1afa9a66a6df92ae', '0101010401010101'*3, - 'NIST SP800-20 A.2 #26'), - (SP800_20_A2_PT, 'b3c1cc715cb879d8', '0101010201010101'*3, - 'NIST SP800-20 A.2 #27'), - (SP800_20_A2_PT, '19d032e64ab0bd8b', '0101010180010101'*3, - 'NIST SP800-20 A.2 #28'), - (SP800_20_A2_PT, '3cfaa7a7dc8720dc', '0101010140010101'*3, - 'NIST SP800-20 A.2 #29'), - (SP800_20_A2_PT, 'b7265f7f447ac6f3', '0101010120010101'*3, - 'NIST SP800-20 A.2 #30'), - (SP800_20_A2_PT, '9db73b3c0d163f54', '0101010110010101'*3, - 'NIST SP800-20 A.2 #31'), - (SP800_20_A2_PT, '8181b65babf4a975', '0101010108010101'*3, - 'NIST SP800-20 A.2 #32'), - (SP800_20_A2_PT, '93c9b64042eaa240', '0101010104010101'*3, - 'NIST SP800-20 A.2 #33'), - (SP800_20_A2_PT, '5570530829705592', '0101010102010101'*3, - 'NIST SP800-20 A.2 #34'), - (SP800_20_A2_PT, '8638809e878787a0', '0101010101800101'*3, - 'NIST SP800-20 A.2 #35'), - (SP800_20_A2_PT, '41b9a79af79ac208', '0101010101400101'*3, - 'NIST SP800-20 A.2 #36'), - (SP800_20_A2_PT, '7a9be42f2009a892', '0101010101200101'*3, - 'NIST SP800-20 A.2 #37'), - (SP800_20_A2_PT, '29038d56ba6d2745', '0101010101100101'*3, - 'NIST SP800-20 A.2 #38'), - (SP800_20_A2_PT, '5495c6abf1e5df51', '0101010101080101'*3, - 'NIST SP800-20 A.2 #39'), - (SP800_20_A2_PT, 'ae13dbd561488933', '0101010101040101'*3, - 'NIST SP800-20 A.2 #40'), - (SP800_20_A2_PT, '024d1ffa8904e389', '0101010101020101'*3, - 'NIST SP800-20 A.2 #41'), - 
(SP800_20_A2_PT, 'd1399712f99bf02e', '0101010101018001'*3, - 'NIST SP800-20 A.2 #42'), - (SP800_20_A2_PT, '14c1d7c1cffec79e', '0101010101014001'*3, - 'NIST SP800-20 A.2 #43'), - (SP800_20_A2_PT, '1de5279dae3bed6f', '0101010101012001'*3, - 'NIST SP800-20 A.2 #44'), - (SP800_20_A2_PT, 'e941a33f85501303', '0101010101011001'*3, - 'NIST SP800-20 A.2 #45'), - (SP800_20_A2_PT, 'da99dbbc9a03f379', '0101010101010801'*3, - 'NIST SP800-20 A.2 #46'), - (SP800_20_A2_PT, 'b7fc92f91d8e92e9', '0101010101010401'*3, - 'NIST SP800-20 A.2 #47'), - (SP800_20_A2_PT, 'ae8e5caa3ca04e85', '0101010101010201'*3, - 'NIST SP800-20 A.2 #48'), - (SP800_20_A2_PT, '9cc62df43b6eed74', '0101010101010180'*3, - 'NIST SP800-20 A.2 #49'), - (SP800_20_A2_PT, 'd863dbb5c59a91a0', '0101010101010140'*3, - 'NIST SP800-20 A.2 #50'), - (SP800_20_A2_PT, 'a1ab2190545b91d7', '0101010101010120'*3, - 'NIST SP800-20 A.2 #51'), - (SP800_20_A2_PT, '0875041e64c570f7', '0101010101010110'*3, - 'NIST SP800-20 A.2 #52'), - (SP800_20_A2_PT, '5a594528bebef1cc', '0101010101010108'*3, - 'NIST SP800-20 A.2 #53'), - (SP800_20_A2_PT, 'fcdb3291de21f0c0', '0101010101010104'*3, - 'NIST SP800-20 A.2 #54'), - (SP800_20_A2_PT, '869efd7f9f265a09', '0101010101010102'*3, - 'NIST SP800-20 A.2 #55'), - - # "Two-key 3DES". Test vector generated using PyCrypto 2.0.1. - # This test is designed to test the DES3 API, not the correctness of the - # output. - ('21e81b7ade88a259', '5c577d4d9b20c0f8', - '9b397ebf81b1181e282f4bb8adbadc6b', 'Two-key 3DES'), - - # The following test vectors have been generated with gpg v1.4.0. - # The command line used was: - # gpg -c -z 0 --cipher-algo 3DES --passphrase secret_passphrase \ - # --disable-mdc --s2k-mode 0 --output ct pt - # For an explanation, see test_AES.py . - ( 'ac1762037074324fb53ba3596f73656d69746556616c6c6579', # Plaintext, 'YosemiteValley' - '9979238528357b90e2e0be549cb0b2d5999b9a4a447e5c5c7d', # Ciphertext - '7ade65b460f5ea9be35f9e14aa883a2048e3824aa616c0b2', # Key (hash of 'BearsAhead') - 'GPG Test Vector #1', - dict(mode='OPENPGP', iv='cd47e2afb8b7e4b0', encrypted_iv='6a7eef0b58050e8b904a' ) ), -] - -def get_tests(config={}): - from Crypto.Cipher import DES3 - from .common import make_block_tests - return make_block_tests(DES3, "DES3", test_data) - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_XOR.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_XOR.py deleted file mode 100644 index ffd082c..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_XOR.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/XOR.py: Self-test for the XOR "cipher" -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Cipher.XOR""" - -import unittest - -__revision__ = "$Id$" - -from Crypto.Util.py3compat import * - -# This is a list of (plaintext, ciphertext, key) tuples. -test_data = [ - # Test vectors written from scratch. (Nobody posts XOR test vectors on the web? How disappointing.) - ('01', '01', - '00', - 'zero key'), - - ('0102040810204080', '0003050911214181', - '01', - '1-byte key'), - - ('0102040810204080', 'cda8c8a2dc8a8c2a', - 'ccaa', - '2-byte key'), - - ('ff'*64, 'fffefdfcfbfaf9f8f7f6f5f4f3f2f1f0efeeedecebeae9e8e7e6e5e4e3e2e1e0'*2, - '000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', - '32-byte key'), -] - -class TruncationSelfTest(unittest.TestCase): - - def runTest(self): - """33-byte key (should raise ValueError under current implementation)""" - # Crypto.Cipher.XOR previously truncated its inputs at 32 bytes. Now - # it should raise a ValueError if the length is too long. - self.assertRaises(ValueError, XOR.new, "x"*33) - -def get_tests(config={}): - global XOR - from Crypto.Cipher import XOR - from .common import make_stream_tests - return make_stream_tests(XOR, "XOR", test_data) + [TruncationSelfTest()] - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_15.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_15.py deleted file mode 100644 index b6f3802..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_15.py +++ /dev/null @@ -1,174 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/test_pkcs1_15.py: Self-test for PKCS#1 v1.5 encryption -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -__revision__ = "$Id$" - -import unittest -import sys - -from Crypto.PublicKey import RSA -from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex -from Crypto import Random -from Crypto.Cipher import PKCS1_v1_5 as PKCS -from Crypto.Util.py3compat import * - -def rws(t): - """Remove white spaces, tabs, and new lines from a string""" - for c in ['\n', '\t', ' ']: - t = t.replace(c,'') - return t - -def t2b(t): - """Convert a text string with bytes in hex form to a byte string""" - clean = b(rws(t)) - if len(clean)%2 == 1: - print(clean) - raise ValueError("Even number of characters expected") - return a2b_hex(clean) - -class PKCS1_15_Tests(unittest.TestCase): - - def setUp(self): - self.rng = Random.new().read - self.key1024 = RSA.generate(1024, self.rng) - - # List of tuples with test data for PKCS#1 v1.5. - # Each tuple is made up by: - # Item #0: dictionary with RSA key component, or key to import - # Item #1: plaintext - # Item #2: ciphertext - # Item #3: random data - - _testData = ( - - # - # Generated with openssl 0.9.8o - # - ( - # Private key - '''-----BEGIN RSA PRIVATE KEY----- -MIICXAIBAAKBgQDAiAnvIAOvqVwJTaYzsKnefZftgtXGE2hPJppGsWl78yz9jeXY -W/FxX/gTPURArNhdnhP6n3p2ZaDIBrO2zizbgIXs0IsljTTcr4vnI8fMXzyNUOjA -zP3nzMqZDZK6757XQAobOssMkBFqRWwilT/3DsBhRpl3iMUhF+wvpTSHewIDAQAB -AoGAC4HV/inOrpgTvSab8Wj0riyZgQOZ3U3ZpSlsfR8ra9Ib9Uee3jCYnKscu6Gk -y6zI/cdt8EPJ4PuwAWSNJzbpbVaDvUq25OD+CX8/uRT08yBS4J8TzBitZJTD4lS7 -atdTnKT0Wmwk+u8tDbhvMKwnUHdJLcuIsycts9rwJVapUtkCQQDvDpx2JMun0YKG -uUttjmL8oJ3U0m3ZvMdVwBecA0eebZb1l2J5PvI3EJD97eKe91Nsw8T3lwpoN40k -IocSVDklAkEAzi1HLHE6EzVPOe5+Y0kGvrIYRRhncOb72vCvBZvD6wLZpQgqo6c4 -d3XHFBBQWA6xcvQb5w+VVEJZzw64y25sHwJBAMYReRl6SzL0qA0wIYrYWrOt8JeQ -8mthulcWHXmqTgC6FEXP9Es5GD7/fuKl4wqLKZgIbH4nqvvGay7xXLCXD/ECQH9a -1JYNMtRen5unSAbIOxRcKkWz92F0LKpm9ZW/S9vFHO+mBcClMGoKJHiuQxLBsLbT -NtEZfSJZAeS2sUtn3/0CQDb2M2zNBTF8LlM0nxmh0k9VGm5TVIyBEMcipmvOgqIs -HKukWBcq9f/UOmS0oEhai/6g+Uf7VHJdWaeO5LzuvwU= ------END RSA PRIVATE KEY-----''', - # Plaintext - '''THIS IS PLAINTEXT\x0A''', - # Ciphertext - '''3f dc fd 3c cd 5c 9b 12 af 65 32 e3 f7 d0 da 36 - 8f 8f d9 e3 13 1c 7f c8 b3 f9 c1 08 e4 eb 79 9c - 91 89 1f 96 3b 94 77 61 99 a4 b1 ee 5d e6 17 c9 - 5d 0a b5 63 52 0a eb 00 45 38 2a fb b0 71 3d 11 - f7 a1 9e a7 69 b3 af 61 c0 bb 04 5b 5d 4b 27 44 - 1f 5b 97 89 ba 6a 08 95 ee 4f a2 eb 56 64 e5 0f - da 7c f9 9a 61 61 06 62 ed a0 bc 5f aa 6c 31 78 - 70 28 1a bb 98 3c e3 6a 60 3c d1 0b 0f 5a f4 75''', - # Random data - '''eb d7 7d 86 a4 35 23 a3 54 7e 02 0b 42 1d - 61 6c af 67 b8 4e 17 56 80 66 36 04 64 34 26 8a - 47 dd 44 b3 1a b2 17 60 f4 91 2e e2 b5 95 64 cc - f9 da c8 70 94 54 86 4c ef 5b 08 7d 18 c4 ab 8d - 04 06 33 8f ca 15 5f 52 60 8a a1 0c f5 08 b5 4c - bb 99 b8 94 25 04 9c e6 01 75 e6 f9 63 7a 65 61 - 13 8a a7 47 77 81 ae 0d b8 2c 4d 50 a5''' - ), - ) - - def testEncrypt1(self): - for test in self._testData: - # Build the key - key = RSA.importKey(test[0]) - # RNG that takes its random numbers from a pool given - # at initialization - class randGen: - def __init__(self, data): - self.data = data - self.idx = 0 - def __call__(self, N): - r = self.data[self.idx:N] - self.idx += N - return r - # The real test - key._randfunc = randGen(t2b(test[3])) - cipher = PKCS.new(key) - ct = cipher.encrypt(b(test[1])) - self.assertEqual(ct, t2b(test[2])) - - def testEncrypt2(self): - # Verify that encryption fail if plaintext is too long - pt = '\x00'*(128-11+1) - cipher = PKCS.new(self.key1024) - 
self.assertRaises(ValueError, cipher.encrypt, pt) - - def testVerify1(self): - for test in self._testData: - # Build the key - key = RSA.importKey(test[0]) - # The real test - cipher = PKCS.new(key) - pt = cipher.decrypt(t2b(test[2]), "---") - self.assertEqual(pt, b(test[1])) - - def testVerify2(self): - # Verify that decryption fails if ciphertext is not as long as - # RSA modulus - cipher = PKCS.new(self.key1024) - self.assertRaises(ValueError, cipher.decrypt, '\x00'*127, "---") - self.assertRaises(ValueError, cipher.decrypt, '\x00'*129, "---") - - # Verify that decryption fails if there are less then 8 non-zero padding - # bytes - pt = b('\x00\x02' + '\xFF'*7 + '\x00' + '\x45'*118) - ct = self.key1024.encrypt(pt, 0)[0] - ct = b('\x00'*(128-len(ct))) + ct - self.assertEqual("---", cipher.decrypt(ct, "---")) - - def testEncryptVerify1(self): - # Encrypt/Verify messages of length [0..RSAlen-11] - # and therefore padding [8..117] - for pt_len in range(0,128-11+1): - pt = self.rng(pt_len) - cipher = PKCS.new(self.key1024) - ct = cipher.encrypt(pt) - pt2 = cipher.decrypt(ct, "---") - self.assertEqual(pt,pt2) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(PKCS1_15_Tests) - return tests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_oaep.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_oaep.py deleted file mode 100644 index 3613415..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_oaep.py +++ /dev/null @@ -1,372 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Cipher/test_pkcs1_oaep.py: Self-test for PKCS#1 OAEP encryption -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - - - -__revision__ = "$Id$" - -import unittest - -from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex - -from Crypto.Util.py3compat import * -from Crypto.PublicKey import RSA -from Crypto.Cipher import PKCS1_OAEP as PKCS -from Crypto.Hash import MD2,MD5,SHA as SHA1,SHA256,RIPEMD -from Crypto import Random - -def rws(t): - """Remove white spaces, tabs, and new lines from a string""" - for c in ['\n', '\t', ' ']: - t = t.replace(c,'') - return t - -def t2b(t): - """Convert a text string with bytes in hex form to a byte string""" - clean = rws(t) - if len(clean)%2 == 1: - raise ValueError("Even number of characters expected") - return a2b_hex(clean) - -class PKCS1_OAEP_Tests(unittest.TestCase): - - def setUp(self): - self.rng = Random.new().read - self.key1024 = RSA.generate(1024, self.rng) - - # List of tuples with test data for PKCS#1 OAEP - # Each tuple is made up by: - # Item #0: dictionary with RSA key component - # Item #1: plaintext - # Item #2: ciphertext - # Item #3: random data (=seed) - # Item #4: hash object - - _testData = ( - - # - # From in oaep-int.txt to be found in - # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip - # - ( - # Private key - { - 'n':'''bb f8 2f 09 06 82 ce 9c 23 38 ac 2b 9d a8 71 f7 - 36 8d 07 ee d4 10 43 a4 40 d6 b6 f0 74 54 f5 1f - b8 df ba af 03 5c 02 ab 61 ea 48 ce eb 6f cd 48 - 76 ed 52 0d 60 e1 ec 46 19 71 9d 8a 5b 8b 80 7f - af b8 e0 a3 df c7 37 72 3e e6 b4 b7 d9 3a 25 84 - ee 6a 64 9d 06 09 53 74 88 34 b2 45 45 98 39 4e - e0 aa b1 2d 7b 61 a5 1f 52 7a 9a 41 f6 c1 68 7f - e2 53 72 98 ca 2a 8f 59 46 f8 e5 fd 09 1d bd cb''', - # Public key - 'e':'11', - # In the test vector, only p and q were given... - # d is computed offline as e^{-1} mod (p-1)(q-1) - 'd':'''a5dafc5341faf289c4b988db30c1cdf83f31251e0 - 668b42784813801579641b29410b3c7998d6bc465745e5c3 - 92669d6870da2c082a939e37fdcb82ec93edac97ff3ad595 - 0accfbc111c76f1a9529444e56aaf68c56c092cd38dc3bef - 5d20a939926ed4f74a13eddfbe1a1cecc4894af9428c2b7b - 8883fe4463a4bc85b1cb3c1''' - } - , - # Plaintext - '''d4 36 e9 95 69 fd 32 a7 c8 a0 5b bc 90 d3 2c 49''', - # Ciphertext - '''12 53 e0 4d c0 a5 39 7b b4 4a 7a b8 7e 9b f2 a0 - 39 a3 3d 1e 99 6f c8 2a 94 cc d3 00 74 c9 5d f7 - 63 72 20 17 06 9e 52 68 da 5d 1c 0b 4f 87 2c f6 - 53 c1 1d f8 23 14 a6 79 68 df ea e2 8d ef 04 bb - 6d 84 b1 c3 1d 65 4a 19 70 e5 78 3b d6 eb 96 a0 - 24 c2 ca 2f 4a 90 fe 9f 2e f5 c9 c1 40 e5 bb 48 - da 95 36 ad 87 00 c8 4f c9 13 0a de a7 4e 55 8d - 51 a7 4d df 85 d8 b5 0d e9 68 38 d6 06 3e 09 55''', - # Random - '''aa fd 12 f6 59 ca e6 34 89 b4 79 e5 07 6d de c2 - f0 6c b5 8f''', - # Hash - SHA1, - ), - - # - # From in oaep-vect.txt to be found in Example 1.1 - # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip - # - ( - # Private key - { - 'n':'''a8 b3 b2 84 af 8e b5 0b 38 70 34 a8 60 f1 46 c4 - 91 9f 31 87 63 cd 6c 55 98 c8 ae 48 11 a1 e0 ab - c4 c7 e0 b0 82 d6 93 a5 e7 fc ed 67 5c f4 66 85 - 12 77 2c 0c bc 64 a7 42 c6 c6 30 f5 33 c8 cc 72 - f6 2a e8 33 c4 0b f2 58 42 e9 84 bb 78 bd bf 97 - c0 10 7d 55 bd b6 62 f5 c4 e0 fa b9 84 5c b5 14 - 8e f7 39 2d d3 aa ff 93 ae 1e 6b 66 7b b3 d4 24 - 76 16 d4 f5 ba 10 d4 cf d2 26 de 88 d3 9f 16 fb''', - 'e':'''01 00 01''', - 'd':'''53 33 9c fd b7 9f c8 46 6a 65 5c 73 16 ac a8 5c - 55 fd 8f 6d d8 98 fd af 11 95 17 ef 4f 52 e8 fd - 8e 25 8d f9 3f ee 18 0f a0 e4 ab 29 69 3c d8 3b - 15 2a 55 3d 4a c4 d1 81 2b 8b 9f a5 af 0e 7f 55 - fe 73 04 df 41 57 09 26 f3 31 1f 15 c4 d6 5a 73 - 
2c 48 31 16 ee 3d 3d 2d 0a f3 54 9a d9 bf 7c bf - b7 8a d8 84 f8 4d 5b eb 04 72 4d c7 36 9b 31 de - f3 7d 0c f5 39 e9 cf cd d3 de 65 37 29 ea d5 d1 ''' - } - , - # Plaintext - '''66 28 19 4e 12 07 3d b0 3b a9 4c da 9e f9 53 23 - 97 d5 0d ba 79 b9 87 00 4a fe fe 34''', - # Ciphertext - '''35 4f e6 7b 4a 12 6d 5d 35 fe 36 c7 77 79 1a 3f - 7b a1 3d ef 48 4e 2d 39 08 af f7 22 fa d4 68 fb - 21 69 6d e9 5d 0b e9 11 c2 d3 17 4f 8a fc c2 01 - 03 5f 7b 6d 8e 69 40 2d e5 45 16 18 c2 1a 53 5f - a9 d7 bf c5 b8 dd 9f c2 43 f8 cf 92 7d b3 13 22 - d6 e8 81 ea a9 1a 99 61 70 e6 57 a0 5a 26 64 26 - d9 8c 88 00 3f 84 77 c1 22 70 94 a0 d9 fa 1e 8c - 40 24 30 9c e1 ec cc b5 21 00 35 d4 7a c7 2e 8a''', - # Random - '''18 b7 76 ea 21 06 9d 69 77 6a 33 e9 6b ad 48 e1 - dd a0 a5 ef''', - SHA1 - ), - - # - # From in oaep-vect.txt to be found in Example 2.1 - # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip - # - ( - # Private key - { - 'n':'''01 94 7c 7f ce 90 42 5f 47 27 9e 70 85 1f 25 d5 - e6 23 16 fe 8a 1d f1 93 71 e3 e6 28 e2 60 54 3e - 49 01 ef 60 81 f6 8c 0b 81 41 19 0d 2a e8 da ba - 7d 12 50 ec 6d b6 36 e9 44 ec 37 22 87 7c 7c 1d - 0a 67 f1 4b 16 94 c5 f0 37 94 51 a4 3e 49 a3 2d - de 83 67 0b 73 da 91 a1 c9 9b c2 3b 43 6a 60 05 - 5c 61 0f 0b af 99 c1 a0 79 56 5b 95 a3 f1 52 66 - 32 d1 d4 da 60 f2 0e da 25 e6 53 c4 f0 02 76 6f - 45''', - 'e':'''01 00 01''', - 'd':'''08 23 f2 0f ad b5 da 89 08 8a 9d 00 89 3e 21 fa - 4a 1b 11 fb c9 3c 64 a3 be 0b aa ea 97 fb 3b 93 - c3 ff 71 37 04 c1 9c 96 3c 1d 10 7a ae 99 05 47 - 39 f7 9e 02 e1 86 de 86 f8 7a 6d de fe a6 d8 cc - d1 d3 c8 1a 47 bf a7 25 5b e2 06 01 a4 a4 b2 f0 - 8a 16 7b 5e 27 9d 71 5b 1b 45 5b dd 7e ab 24 59 - 41 d9 76 8b 9a ce fb 3c cd a5 95 2d a3 ce e7 25 - 25 b4 50 16 63 a8 ee 15 c9 e9 92 d9 24 62 fe 39''' - }, - # Plaintext - '''8f f0 0c aa 60 5c 70 28 30 63 4d 9a 6c 3d 42 c6 - 52 b5 8c f1 d9 2f ec 57 0b ee e7''', - # Ciphertext - '''01 81 af 89 22 b9 fc b4 d7 9d 92 eb e1 98 15 99 - 2f c0 c1 43 9d 8b cd 49 13 98 a0 f4 ad 3a 32 9a - 5b d9 38 55 60 db 53 26 83 c8 b7 da 04 e4 b1 2a - ed 6a ac df 47 1c 34 c9 cd a8 91 ad dc c2 df 34 - 56 65 3a a6 38 2e 9a e5 9b 54 45 52 57 eb 09 9d - 56 2b be 10 45 3f 2b 6d 13 c5 9c 02 e1 0f 1f 8a - bb 5d a0 d0 57 09 32 da cf 2d 09 01 db 72 9d 0f - ef cc 05 4e 70 96 8e a5 40 c8 1b 04 bc ae fe 72 - 0e''', - # Random - '''8c 40 7b 5e c2 89 9e 50 99 c5 3e 8c e7 93 bf 94 - e7 1b 17 82''', - SHA1 - ), - - # - # From in oaep-vect.txt to be found in Example 10.1 - # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip - # - ( - # Private key - { - 'n':'''ae 45 ed 56 01 ce c6 b8 cc 05 f8 03 93 5c 67 4d - db e0 d7 5c 4c 09 fd 79 51 fc 6b 0c ae c3 13 a8 - df 39 97 0c 51 8b ff ba 5e d6 8f 3f 0d 7f 22 a4 - 02 9d 41 3f 1a e0 7e 4e be 9e 41 77 ce 23 e7 f5 - 40 4b 56 9e 4e e1 bd cf 3c 1f b0 3e f1 13 80 2d - 4f 85 5e b9 b5 13 4b 5a 7c 80 85 ad ca e6 fa 2f - a1 41 7e c3 76 3b e1 71 b0 c6 2b 76 0e de 23 c1 - 2a d9 2b 98 08 84 c6 41 f5 a8 fa c2 6b da d4 a0 - 33 81 a2 2f e1 b7 54 88 50 94 c8 25 06 d4 01 9a - 53 5a 28 6a fe b2 71 bb 9b a5 92 de 18 dc f6 00 - c2 ae ea e5 6e 02 f7 cf 79 fc 14 cf 3b dc 7c d8 - 4f eb bb f9 50 ca 90 30 4b 22 19 a7 aa 06 3a ef - a2 c3 c1 98 0e 56 0c d6 4a fe 77 95 85 b6 10 76 - 57 b9 57 85 7e fd e6 01 09 88 ab 7d e4 17 fc 88 - d8 f3 84 c4 e6 e7 2c 3f 94 3e 0c 31 c0 c4 a5 cc - 36 f8 79 d8 a3 ac 9d 7d 59 86 0e aa da 6b 83 bb''', - 'e':'''01 00 01''', - 'd':'''05 6b 04 21 6f e5 f3 54 ac 77 25 0a 4b 6b 0c 85 - 25 a8 5c 59 b0 bd 80 c5 64 50 a2 2d 5f 43 8e 59 - 6a 33 3a a8 75 e2 91 dd 43 f4 8c b8 8b 9d 
5f c0 - d4 99 f9 fc d1 c3 97 f9 af c0 70 cd 9e 39 8c 8d - 19 e6 1d b7 c7 41 0a 6b 26 75 df bf 5d 34 5b 80 - 4d 20 1a dd 50 2d 5c e2 df cb 09 1c e9 99 7b be - be 57 30 6f 38 3e 4d 58 81 03 f0 36 f7 e8 5d 19 - 34 d1 52 a3 23 e4 a8 db 45 1d 6f 4a 5b 1b 0f 10 - 2c c1 50 e0 2f ee e2 b8 8d ea 4a d4 c1 ba cc b2 - 4d 84 07 2d 14 e1 d2 4a 67 71 f7 40 8e e3 05 64 - fb 86 d4 39 3a 34 bc f0 b7 88 50 1d 19 33 03 f1 - 3a 22 84 b0 01 f0 f6 49 ea f7 93 28 d4 ac 5c 43 - 0a b4 41 49 20 a9 46 0e d1 b7 bc 40 ec 65 3e 87 - 6d 09 ab c5 09 ae 45 b5 25 19 01 16 a0 c2 61 01 - 84 82 98 50 9c 1c 3b f3 a4 83 e7 27 40 54 e1 5e - 97 07 50 36 e9 89 f6 09 32 80 7b 52 57 75 1e 79''' - }, - # Plaintext - '''8b ba 6b f8 2a 6c 0f 86 d5 f1 75 6e 97 95 68 70 - b0 89 53 b0 6b 4e b2 05 bc 16 94 ee''', - # Ciphertext - '''53 ea 5d c0 8c d2 60 fb 3b 85 85 67 28 7f a9 15 - 52 c3 0b 2f eb fb a2 13 f0 ae 87 70 2d 06 8d 19 - ba b0 7f e5 74 52 3d fb 42 13 9d 68 c3 c5 af ee - e0 bf e4 cb 79 69 cb f3 82 b8 04 d6 e6 13 96 14 - 4e 2d 0e 60 74 1f 89 93 c3 01 4b 58 b9 b1 95 7a - 8b ab cd 23 af 85 4f 4c 35 6f b1 66 2a a7 2b fc - c7 e5 86 55 9d c4 28 0d 16 0c 12 67 85 a7 23 eb - ee be ff 71 f1 15 94 44 0a ae f8 7d 10 79 3a 87 - 74 a2 39 d4 a0 4c 87 fe 14 67 b9 da f8 52 08 ec - 6c 72 55 79 4a 96 cc 29 14 2f 9a 8b d4 18 e3 c1 - fd 67 34 4b 0c d0 82 9d f3 b2 be c6 02 53 19 62 - 93 c6 b3 4d 3f 75 d3 2f 21 3d d4 5c 62 73 d5 05 - ad f4 cc ed 10 57 cb 75 8f c2 6a ee fa 44 12 55 - ed 4e 64 c1 99 ee 07 5e 7f 16 64 61 82 fd b4 64 - 73 9b 68 ab 5d af f0 e6 3e 95 52 01 68 24 f0 54 - bf 4d 3c 8c 90 a9 7b b6 b6 55 32 84 eb 42 9f cc''', - # Random - '''47 e1 ab 71 19 fe e5 6c 95 ee 5e aa d8 6f 40 d0 - aa 63 bd 33''', - SHA1 - ), - ) - - def testEncrypt1(self): - # Verify encryption using all test vectors - for test in self._testData: - # Build the key - comps = [ int(rws(test[0][x]),16) for x in ('n','e') ] - key = RSA.construct(comps) - # RNG that takes its random numbers from a pool given - # at initialization - class randGen: - def __init__(self, data): - self.data = data - self.idx = 0 - def __call__(self, N): - r = self.data[self.idx:N] - self.idx += N - return r - # The real test - key._randfunc = randGen(t2b(test[3])) - cipher = PKCS.new(key, test[4]) - ct = cipher.encrypt(t2b(test[1])) - self.assertEqual(ct, t2b(test[2])) - - def testEncrypt2(self): - # Verify that encryption fails if plaintext is too long - pt = '\x00'*(128-2*20-2+1) - cipher = PKCS.new(self.key1024) - self.assertRaises(ValueError, cipher.encrypt, pt) - - def testDecrypt1(self): - # Verify decryption using all test vectors - for test in self._testData: - # Build the key - comps = [ int(rws(test[0][x]),16) for x in ('n','e','d') ] - key = RSA.construct(comps) - # The real test - cipher = PKCS.new(key, test[4]) - pt = cipher.decrypt(t2b(test[2])) - self.assertEqual(pt, t2b(test[1])) - - def testDecrypt2(self): - # Simplest possible negative tests - for ct_size in (127,128,129): - cipher = PKCS.new(self.key1024) - self.assertRaises(ValueError, cipher.decrypt, bchr(0x00)*ct_size) - - def testEncryptDecrypt1(self): - # Encrypt/Decrypt messages of length [0..128-2*20-2] - for pt_len in range(0,128-2*20-2): - pt = self.rng(pt_len) - ct = PKCS.encrypt(pt, self.key1024) - pt2 = PKCS.decrypt(ct, self.key1024) - self.assertEqual(pt,pt2) - - def testEncryptDecrypt1(self): - # Helper function to monitor what's requested from RNG - global asked - def localRng(N): - global asked - asked += N - return self.rng(N) - # Verify that OAEP is friendly to all hashes - for hashmod in 
(MD2,MD5,SHA1,SHA256,RIPEMD): - # Verify that encrypt() asks for as many random bytes - # as the hash output size - asked = 0 - pt = self.rng(40) - self.key1024._randfunc = localRng - cipher = PKCS.new(self.key1024, hashmod) - ct = cipher.encrypt(pt) - self.assertEqual(cipher.decrypt(ct), pt) - self.assertTrue(asked > hashmod.digest_size) - - def testEncryptDecrypt2(self): - # Verify that OAEP supports labels - pt = self.rng(35) - xlabel = self.rng(22) - cipher = PKCS.new(self.key1024, label=xlabel) - ct = cipher.encrypt(pt) - self.assertEqual(cipher.decrypt(ct), pt) - - def testEncryptDecrypt3(self): - # Verify that encrypt() uses the custom MGF - global mgfcalls - # Helper function to monitor what's requested from MGF - def newMGF(seed,maskLen): - global mgfcalls - mgfcalls += 1 - return bchr(0x00)*maskLen - mgfcalls = 0 - pt = self.rng(32) - cipher = PKCS.new(self.key1024, mgfunc=newMGF) - ct = cipher.encrypt(pt) - self.assertEqual(mgfcalls, 2) - self.assertEqual(cipher.decrypt(ct), pt) - -def get_tests(config={}): - tests = [] - tests += list_test_cases(PKCS1_OAEP_Tests) - return tests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/__init__.py deleted file mode 100644 index bb19f9b..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/__init__.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/__init__.py: Self-test for hash modules -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test for hash modules""" - -__revision__ = "$Id$" - -def get_tests(config={}): - tests = [] - from Crypto.SelfTest.Hash import test_HMAC; tests += test_HMAC.get_tests(config=config) - from Crypto.SelfTest.Hash import test_MD2; tests += test_MD2.get_tests(config=config) - from Crypto.SelfTest.Hash import test_MD4; tests += test_MD4.get_tests(config=config) - from Crypto.SelfTest.Hash import test_MD5; tests += test_MD5.get_tests(config=config) - from Crypto.SelfTest.Hash import test_RIPEMD; tests += test_RIPEMD.get_tests(config=config) - from Crypto.SelfTest.Hash import test_SHA; tests += test_SHA.get_tests(config=config) - from Crypto.SelfTest.Hash import test_SHA256; tests += test_SHA256.get_tests(config=config) - try: - from Crypto.SelfTest.Hash import test_SHA224; tests += test_SHA224.get_tests(config=config) - from Crypto.SelfTest.Hash import test_SHA384; tests += test_SHA384.get_tests(config=config) - from Crypto.SelfTest.Hash import test_SHA512; tests += test_SHA512.get_tests(config=config) - except ImportError: - import sys - sys.stderr.write("SelfTest: warning: not testing SHA224/SHA384/SHA512 modules (not available)\n") - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/common.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/common.py deleted file mode 100644 index 12e169f..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/common.py +++ /dev/null @@ -1,197 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/common.py: Common code for Crypto.SelfTest.Hash -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-testing for PyCrypto hash modules""" - -__revision__ = "$Id$" - -import sys -import unittest -import binascii -from Crypto.Util.py3compat import * - -# For compatibility with Python 2.1 and Python 2.2 -if sys.hexversion < 0x02030000: - # Python 2.1 doesn't have a dict() function - # Python 2.2 dict() function raises TypeError if you do dict(MD5='blah') - def dict(**kwargs): - return kwargs.copy() -else: - dict = dict - - -class HashDigestSizeSelfTest(unittest.TestCase): - - def __init__(self, hashmod, description, expected): - unittest.TestCase.__init__(self) - self.hashmod = hashmod - self.expected = expected - self.description = description - - def shortDescription(self): - return self.description - - def runTest(self): - self.assertTrue(hasattr(self.hashmod, "digest_size")) - self.assertEqual(self.hashmod.digest_size, self.expected) - h = self.hashmod.new() - self.assertTrue(hasattr(h, "digest_size")) - self.assertEqual(h.digest_size, self.expected) - - -class HashSelfTest(unittest.TestCase): - - def __init__(self, hashmod, description, expected, input): - unittest.TestCase.__init__(self) - self.hashmod = hashmod - self.expected = expected - self.input = input - self.description = description - - def shortDescription(self): - return self.description - - def runTest(self): - h = self.hashmod.new() - h.update(self.input) - - out1 = binascii.b2a_hex(h.digest()) - out2 = h.hexdigest() - - h = self.hashmod.new(self.input) - - out3 = h.hexdigest() - out4 = binascii.b2a_hex(h.digest()) - - # PY3K: hexdigest() should return str(), and digest() bytes - self.assertEqual(self.expected, out1) # h = .new(); h.update(data); h.digest() - if sys.version_info[0] == 2: - self.assertEqual(self.expected, out2) # h = .new(); h.update(data); h.hexdigest() - self.assertEqual(self.expected, out3) # h = .new(data); h.hexdigest() - else: - self.assertEqual(self.expected.decode(), out2) # h = .new(); h.update(data); h.hexdigest() - self.assertEqual(self.expected.decode(), out3) # h = .new(data); h.hexdigest() - self.assertEqual(self.expected, out4) # h = .new(data); h.digest() - - # Verify that new() object method produces a fresh hash object - h2 = h.new() - h2.update(self.input) - out5 = binascii.b2a_hex(h2.digest()) - self.assertEqual(self.expected, out5) - -class HashTestOID(unittest.TestCase): - def __init__(self, hashmod, oid): - unittest.TestCase.__init__(self) - self.hashmod = hashmod - self.oid = oid - - def runTest(self): - h = self.hashmod.new() - if self.oid==None: - try: - raised = 0 - a = h.oid - except AttributeError: - raised = 1 - self.assertEqual(raised,1) - else: - self.assertEqual(h.oid, self.oid) - -class MACSelfTest(unittest.TestCase): - - def __init__(self, hashmod, description, expected_dict, input, key, hashmods): - unittest.TestCase.__init__(self) - self.hashmod = hashmod - self.expected_dict = expected_dict - self.input = input - self.key = key - self.hashmods = hashmods - self.description = description - - def shortDescription(self): - return self.description - - def runTest(self): - for hashname in list(self.expected_dict.keys()): - hashmod = self.hashmods[hashname] - key = binascii.a2b_hex(b(self.key)) - data = binascii.a2b_hex(b(self.input)) - - # Strip whitespace from the expected string (which should be in lowercase-hex) - expected = b("".join(self.expected_dict[hashname].split())) - - h = self.hashmod.new(key, digestmod=hashmod) - h.update(data) - out1 = binascii.b2a_hex(h.digest()) - out2 = 
h.hexdigest() - - h = self.hashmod.new(key, data, hashmod) - - out3 = h.hexdigest() - out4 = binascii.b2a_hex(h.digest()) - - # Test .copy() - h2 = h.copy() - h.update(b("blah blah blah")) # Corrupt the original hash object - out5 = binascii.b2a_hex(h2.digest()) # The copied hash object should return the correct result - - # PY3K: hexdigest() should return str(), and digest() bytes - self.assertEqual(expected, out1) - if sys.version_info[0] == 2: - self.assertEqual(expected, out2) - self.assertEqual(expected, out3) - else: - self.assertEqual(expected.decode(), out2) - self.assertEqual(expected.decode(), out3) - self.assertEqual(expected, out4) - self.assertEqual(expected, out5) - -def make_hash_tests(module, module_name, test_data, digest_size, oid=None): - tests = [] - for i in range(len(test_data)): - row = test_data[i] - (expected, input) = list(map(b,row[0:2])) - if len(row) < 3: - description = repr(input) - else: - description = row[2].encode('latin-1') - name = "%s #%d: %s" % (module_name, i+1, description) - tests.append(HashSelfTest(module, name, expected, input)) - if oid is not None: - oid = b(oid) - name = "%s #%d: digest_size" % (module_name, i+1) - tests.append(HashDigestSizeSelfTest(module, name, digest_size)) - tests.append(HashTestOID(module, oid)) - return tests - -def make_mac_tests(module, module_name, test_data, hashmods): - tests = [] - for i in range(len(test_data)): - row = test_data[i] - (key, data, results, description) = row - name = "%s #%d: %s" % (module_name, i+1, description) - tests.append(MACSelfTest(module, name, results, data, key, hashmods)) - return tests - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_HMAC.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_HMAC.py deleted file mode 100644 index 8a1123e..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_HMAC.py +++ /dev/null @@ -1,223 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/HMAC.py: Self-test for the HMAC module -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Hash.HMAC""" - -__revision__ = "$Id$" - -from .common import dict # For compatibility with Python 2.1 and 2.2 -from Crypto.Util.py3compat import * - -# This is a list of (key, data, results, description) tuples. 
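# A minimal sketch of how a (key, data, results, description) vector of this
# shape can be cross-checked, using the standard-library hmac and hashlib
# modules rather than the Crypto.Hash.HMAC API exercised by this file
# (RFC 2202 test case 1, listed below):
import binascii
import hashlib
import hmac

rfc2202_key = binascii.unhexlify('0b' * 16)
rfc2202_msg = binascii.unhexlify('4869205468657265')        # "Hi There"
tag = hmac.new(rfc2202_key, rfc2202_msg, hashlib.md5).hexdigest()
assert tag == '9294727a3638bb1c13f48ef8158bfc9d'            # matches the MD5 entry below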
-test_data = [ - ## Test vectors from RFC 2202 ## - # Test that the default hashmod is MD5 - ('0b' * 16, - '4869205468657265', - dict(default='9294727a3638bb1c13f48ef8158bfc9d'), - 'default-is-MD5'), - - # Test case 1 (MD5) - ('0b' * 16, - '4869205468657265', - dict(MD5='9294727a3638bb1c13f48ef8158bfc9d'), - 'RFC 2202 #1-MD5 (HMAC-MD5)'), - - # Test case 1 (SHA1) - ('0b' * 20, - '4869205468657265', - dict(SHA1='b617318655057264e28bc0b6fb378c8ef146be00'), - 'RFC 2202 #1-SHA1 (HMAC-SHA1)'), - - # Test case 2 - ('4a656665', - '7768617420646f2079612077616e7420666f72206e6f7468696e673f', - dict(MD5='750c783e6ab0b503eaa86e310a5db738', - SHA1='effcdf6ae5eb2fa2d27416d5f184df9c259a7c79'), - 'RFC 2202 #2 (HMAC-MD5/SHA1)'), - - # Test case 3 (MD5) - ('aa' * 16, - 'dd' * 50, - dict(MD5='56be34521d144c88dbb8c733f0e8b3f6'), - 'RFC 2202 #3-MD5 (HMAC-MD5)'), - - # Test case 3 (SHA1) - ('aa' * 20, - 'dd' * 50, - dict(SHA1='125d7342b9ac11cd91a39af48aa17b4f63f175d3'), - 'RFC 2202 #3-SHA1 (HMAC-SHA1)'), - - # Test case 4 - ('0102030405060708090a0b0c0d0e0f10111213141516171819', - 'cd' * 50, - dict(MD5='697eaf0aca3a3aea3a75164746ffaa79', - SHA1='4c9007f4026250c6bc8414f9bf50c86c2d7235da'), - 'RFC 2202 #4 (HMAC-MD5/SHA1)'), - - # Test case 5 (MD5) - ('0c' * 16, - '546573742057697468205472756e636174696f6e', - dict(MD5='56461ef2342edc00f9bab995690efd4c'), - 'RFC 2202 #5-MD5 (HMAC-MD5)'), - - # Test case 5 (SHA1) - # NB: We do not implement hash truncation, so we only test the full hash here. - ('0c' * 20, - '546573742057697468205472756e636174696f6e', - dict(SHA1='4c1a03424b55e07fe7f27be1d58bb9324a9a5a04'), - 'RFC 2202 #5-SHA1 (HMAC-SHA1)'), - - # Test case 6 - ('aa' * 80, - '54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a' - + '65204b6579202d2048617368204b6579204669727374', - dict(MD5='6b1ab7fe4bd7bf8f0b62e6ce61b9d0cd', - SHA1='aa4ae5e15272d00e95705637ce8a3b55ed402112'), - 'RFC 2202 #6 (HMAC-MD5/SHA1)'), - - # Test case 7 - ('aa' * 80, - '54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a' - + '65204b657920616e64204c6172676572205468616e204f6e6520426c6f636b2d' - + '53697a652044617461', - dict(MD5='6f630fad67cda0ee1fb1f562db3aa53e', - SHA1='e8e99d0f45237d786d6bbaa7965c7808bbff1a91'), - 'RFC 2202 #7 (HMAC-MD5/SHA1)'), - - ## Test vectors from RFC 4231 ## - # 4.2. Test Case 1 - ('0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b', - '4869205468657265', - dict(SHA256=''' - b0344c61d8db38535ca8afceaf0bf12b - 881dc200c9833da726e9376c2e32cff7 - '''), - 'RFC 4231 #1 (HMAC-SHA256)'), - - # 4.3. Test Case 2 - Test with a key shorter than the length of the HMAC - # output. - ('4a656665', - '7768617420646f2079612077616e7420666f72206e6f7468696e673f', - dict(SHA256=''' - 5bdcc146bf60754e6a042426089575c7 - 5a003f089d2739839dec58b964ec3843 - '''), - 'RFC 4231 #2 (HMAC-SHA256)'), - - # 4.4. Test Case 3 - Test with a combined length of key and data that is - # larger than 64 bytes (= block-size of SHA-224 and SHA-256). - ('aa' * 20, - 'dd' * 50, - dict(SHA256=''' - 773ea91e36800e46854db8ebd09181a7 - 2959098b3ef8c122d9635514ced565fe - '''), - 'RFC 4231 #3 (HMAC-SHA256)'), - - # 4.5. Test Case 4 - Test with a combined length of key and data that is - # larger than 64 bytes (= block-size of SHA-224 and SHA-256). - ('0102030405060708090a0b0c0d0e0f10111213141516171819', - 'cd' * 50, - dict(SHA256=''' - 82558a389a443c0ea4cc819899f2083a - 85f0faa3e578f8077a2e3ff46729665b - '''), - 'RFC 4231 #4 (HMAC-SHA256)'), - - # 4.6. Test Case 5 - Test with a truncation of output to 128 bits. 
- # - # Not included because we do not implement hash truncation. - # - - # 4.7. Test Case 6 - Test with a key larger than 128 bytes (= block-size of - # SHA-384 and SHA-512). - ('aa' * 131, - '54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a' - + '65204b6579202d2048617368204b6579204669727374', - dict(SHA256=''' - 60e431591ee0b67f0d8a26aacbf5b77f - 8e0bc6213728c5140546040f0ee37f54 - '''), - 'RFC 4231 #6 (HMAC-SHA256)'), - - # 4.8. Test Case 7 - Test with a key and data that is larger than 128 bytes - # (= block-size of SHA-384 and SHA-512). - ('aa' * 131, - '5468697320697320612074657374207573696e672061206c6172676572207468' - + '616e20626c6f636b2d73697a65206b657920616e642061206c61726765722074' - + '68616e20626c6f636b2d73697a6520646174612e20546865206b6579206e6565' - + '647320746f20626520686173686564206265666f7265206265696e6720757365' - + '642062792074686520484d414320616c676f726974686d2e', - dict(SHA256=''' - 9b09ffa71b942fcb27635fbcd5b0e944 - bfdc63644f0713938a7f51535c3a35e2 - '''), - 'RFC 4231 #7 (HMAC-SHA256)'), -] - -hashlib_test_data = [ - # Test case 8 (SHA224) - ('4a656665', - '7768617420646f2079612077616e74' - + '20666f72206e6f7468696e673f', - dict(SHA224='a30e01098bc6dbbf45690f3a7e9e6d0f8bbea2a39e6148008fd05e44'), - 'RFC 4634 8.4 SHA224 (HMAC-SHA224)'), - - # Test case 9 (SHA384) - ('4a656665', - '7768617420646f2079612077616e74' - + '20666f72206e6f7468696e673f', - dict(SHA384='af45d2e376484031617f78d2b58a6b1b9c7ef464f5a01b47e42ec3736322445e8e2240ca5e69e2c78b3239ecfab21649'), - 'RFC 4634 8.4 SHA384 (HMAC-SHA384)'), - - # Test case 10 (SHA512) - ('4a656665', - '7768617420646f2079612077616e74' - + '20666f72206e6f7468696e673f', - dict(SHA512='164b7a7bfcf819e2e395fbe73b56e0a387bd64222e831fd610270cd7ea2505549758bf75c05a994a6d034f65f8f0e6fdcaeab1a34d4a6b4b636e070a38bce737'), - 'RFC 4634 8.4 SHA512 (HMAC-SHA512)'), - -] - -def get_tests(config={}): - global test_data - from Crypto.Hash import HMAC, MD5, SHA as SHA1, SHA256 - from .common import make_mac_tests - hashmods = dict(MD5=MD5, SHA1=SHA1, SHA256=SHA256, default=None) - try: - from Crypto.Hash import SHA224, SHA384, SHA512 - hashmods.update(dict(SHA224=SHA224, SHA384=SHA384, SHA512=SHA512)) - test_data += hashlib_test_data - except ImportError: - import sys - sys.stderr.write("SelfTest: warning: not testing HMAC-SHA224/384/512 (not available)\n") - return make_mac_tests(HMAC, "HMAC", test_data, hashmods) - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_MD2.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_MD2.py deleted file mode 100644 index 8653d1a..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_MD2.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/MD2.py: Self-test for the MD2 hash function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Hash.MD2""" - -__revision__ = "$Id$" - -from Crypto.Util.py3compat import * - -# This is a list of (expected_result, input[, description]) tuples. -test_data = [ - # Test vectors from RFC 1319 - ('8350e5a3e24c153df2275c9f80692773', '', "'' (empty string)"), - ('32ec01ec4a6dac72c0ab96fb34c0b5d1', 'a'), - ('da853b0d3f88d99b30283a69e6ded6bb', 'abc'), - ('ab4f496bfb2a530b219ff33031fe06b0', 'message digest'), - - ('4e8ddff3650292ab5a4108c3aa47940b', 'abcdefghijklmnopqrstuvwxyz', - 'a-z'), - - ('da33def2a42df13975352846c30338cd', - 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', - 'A-Z, a-z, 0-9'), - - ('d5976f79d83d3a0dc9806c3c66f3efd8', - '1234567890123456789012345678901234567890123456' - + '7890123456789012345678901234567890', - "'1234567890' * 8"), -] - -def get_tests(config={}): - from Crypto.Hash import MD2 - from .common import make_hash_tests - return make_hash_tests(MD2, "MD2", test_data, - digest_size=16, - oid="\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x02") - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_MD4.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_MD4.py deleted file mode 100644 index 11259ef..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_MD4.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/MD4.py: Self-test for the MD4 hash function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Hash.MD4""" - -__revision__ = "$Id$" - -from Crypto.Util.py3compat import * - -# This is a list of (expected_result, input[, description]) tuples. 
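# A minimal spot-check sketch for a row of this shape. MD4 is not guaranteed
# to be available in hashlib; where the interpreter's OpenSSL build provides
# it, hashlib.new('md4') can be checked against the RFC 1320 "abc" vector
# listed below:
import hashlib

try:
    assert hashlib.new('md4', b'abc').hexdigest() == 'a448017aaf21d8525fc10ae87aa6729d'
except ValueError:
    pass  # this Python/OpenSSL combination does not expose MD4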
-test_data = [ - # Test vectors from RFC 1320 - ('31d6cfe0d16ae931b73c59d7e0c089c0', '', "'' (empty string)"), - ('bde52cb31de33e46245e05fbdbd6fb24', 'a'), - ('a448017aaf21d8525fc10ae87aa6729d', 'abc'), - ('d9130a8164549fe818874806e1c7014b', 'message digest'), - - ('d79e1c308aa5bbcdeea8ed63df412da9', 'abcdefghijklmnopqrstuvwxyz', - 'a-z'), - - ('043f8582f241db351ce627e153e7f0e4', - 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', - 'A-Z, a-z, 0-9'), - - ('e33b4ddc9c38f2199c3e7b164fcc0536', - '1234567890123456789012345678901234567890123456' - + '7890123456789012345678901234567890', - "'1234567890' * 8"), -] - -def get_tests(config={}): - from Crypto.Hash import MD4 - from .common import make_hash_tests - return make_hash_tests(MD4, "MD4", test_data, - digest_size=16, - oid="\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x04") - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_MD5.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_MD5.py deleted file mode 100644 index 724dea0..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_MD5.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/MD5.py: Self-test for the MD5 hash function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Hash.MD5""" - -__revision__ = "$Id$" - -from Crypto.Util.py3compat import * - -# This is a list of (expected_result, input[, description]) tuples. 
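# The (expected_result, input[, description]) rows below can also be checked
# directly against the standard-library hashlib module, independent of the
# Crypto.Hash.MD5 implementation under test -- a minimal sketch for the first
# three RFC 1321 vectors:
import hashlib

for expected, message in [('d41d8cd98f00b204e9800998ecf8427e', b''),
                          ('0cc175b9c0f1b6a831c399e269772661', b'a'),
                          ('900150983cd24fb0d6963f7d28e17f72', b'abc')]:
    assert hashlib.md5(message).hexdigest() == expected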
-test_data = [ - # Test vectors from RFC 1321 - ('d41d8cd98f00b204e9800998ecf8427e', '', "'' (empty string)"), - ('0cc175b9c0f1b6a831c399e269772661', 'a'), - ('900150983cd24fb0d6963f7d28e17f72', 'abc'), - ('f96b697d7cb7938d525a2f31aaf161d0', 'message digest'), - - ('c3fcd3d76192e4007dfb496cca67e13b', 'abcdefghijklmnopqrstuvwxyz', - 'a-z'), - - ('d174ab98d277d9f5a5611c2c9f419d9f', - 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', - 'A-Z, a-z, 0-9'), - - ('57edf4a22be3c955ac49da2e2107b67a', - '1234567890123456789012345678901234567890123456' - + '7890123456789012345678901234567890', - "'1234567890' * 8"), -] - -def get_tests(config={}): - from Crypto.Hash import MD5 - from .common import make_hash_tests - return make_hash_tests(MD5, "MD5", test_data, - digest_size=16, - oid="\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x05") - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_RIPEMD.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_RIPEMD.py deleted file mode 100644 index 6c58876..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_RIPEMD.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/test_RIPEMD.py: Self-test for the RIPEMD-160 hash function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -#"""Self-test suite for Crypto.Hash.RIPEMD""" - -__revision__ = "$Id$" - -from Crypto.Util.py3compat import * - -# This is a list of (expected_result, input[, description]) tuples. 
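# RIPEMD-160 may or may not be exposed by hashlib, depending on the OpenSSL
# build; a guarded sketch against the "abc" vector listed below (the file
# itself tests Crypto.Hash.RIPEMD, not hashlib):
import hashlib

try:
    assert hashlib.new('ripemd160', b'abc').hexdigest() == \
        '8eb208f7e05d987a9b044a8e98c6b087f15a0bfc'
except ValueError:
    pass  # ripemd160 not provided by this OpenSSL build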
-test_data = [ - # Test vectors downloaded 2008-09-12 from - # http://homes.esat.kuleuven.be/~bosselae/ripemd160.html - ('9c1185a5c5e9fc54612808977ee8f548b2258d31', '', "'' (empty string)"), - ('0bdc9d2d256b3ee9daae347be6f4dc835a467ffe', 'a'), - ('8eb208f7e05d987a9b044a8e98c6b087f15a0bfc', 'abc'), - ('5d0689ef49d2fae572b881b123a85ffa21595f36', 'message digest'), - - ('f71c27109c692c1b56bbdceb5b9d2865b3708dbc', - 'abcdefghijklmnopqrstuvwxyz', - 'a-z'), - - ('12a053384a9c0c88e405a06c27dcf49ada62eb2b', - 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq', - 'abcdbcd...pnopq'), - - ('b0e20b6e3116640286ed3a87a5713079b21f5189', - 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', - 'A-Z, a-z, 0-9'), - - ('9b752e45573d4b39f4dbd3323cab82bf63326bfb', - '1234567890' * 8, - "'1234567890' * 8"), - - ('52783243c1697bdbe16d37f97f68f08325dc1528', - 'a' * 10**6, - '"a" * 10**6'), -] - -def get_tests(config={}): - from Crypto.Hash import RIPEMD - from .common import make_hash_tests - return make_hash_tests(RIPEMD, "RIPEMD", test_data, - digest_size=20, - oid="\x06\x05\x2b\x24\x03\02\x01") - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_SHA.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_SHA.py deleted file mode 100644 index bd52a46..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_SHA.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/SHA.py: Self-test for the SHA-1 hash function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Hash.SHA""" - -__revision__ = "$Id$" - -from Crypto.Util.py3compat import * - -# Test vectors from various sources -# This is a list of (expected_result, input[, description]) tuples. 
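# A minimal hashlib-based sketch of the FIPS 180-2 "abc" vector listed below,
# also illustrating the digest()/hexdigest() relationship that HashSelfTest in
# common.py verifies for every row:
import binascii
import hashlib

h = hashlib.sha1(b'abc')
assert h.hexdigest() == 'a9993e364706816aba3e25717850c26c9cd0d89d'
assert binascii.hexlify(h.digest()).decode() == h.hexdigest()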
-test_data = [ - # FIPS PUB 180-2, A.1 - "One-Block Message" - ('a9993e364706816aba3e25717850c26c9cd0d89d', 'abc'), - - # FIPS PUB 180-2, A.2 - "Multi-Block Message" - ('84983e441c3bd26ebaae4aa1f95129e5e54670f1', - 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'), - - # FIPS PUB 180-2, A.3 - "Long Message" -# ('34aa973cd4c4daa4f61eeb2bdbad27316534016f', -# 'a' * 10**6, -# '"a" * 10**6'), - - # RFC 3174: Section 7.3, "TEST4" (multiple of 512 bits) - ('dea356a2cddd90c7a7ecedc5ebb563934f460452', - '01234567' * 80, - '"01234567" * 80'), -] - -def get_tests(config={}): - from Crypto.Hash import SHA - from .common import make_hash_tests - return make_hash_tests(SHA, "SHA", test_data, - digest_size=20, - oid="\x06\x05\x2B\x0E\x03\x02\x1A") - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_SHA224.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_SHA224.py deleted file mode 100644 index 1db501b..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_SHA224.py +++ /dev/null @@ -1,65 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/test_SHA224.py: Self-test for the SHA-224 hash function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Hash.SHA224""" - -__revision__ = "$Id$" - -# Test vectors from various sources -# This is a list of (expected_result, input[, description]) tuples. 
-test_data = [ - - # RFC 3874: Section 3.1, "Test Vector #1 - ('23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7', 'abc'), - - # RFC 3874: Section 3.2, "Test Vector #2 - ('75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525', 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'), - - # RFC 3874: Section 3.3, "Test Vector #3 - ('20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67', 'a' * 10**6, "'a' * 10**6"), - - # Examples from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm - ('d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f', ''), - - ('49b08defa65e644cbf8a2dd9270bdededabc741997d1dadd42026d7b', - 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), - - ('58911e7fccf2971a7d07f93162d8bd13568e71aa8fc86fc1fe9043d1', - 'Frank jagt im komplett verwahrlosten Taxi quer durch Bayern'), - -] - -def get_tests(config={}): - from Crypto.Hash import SHA224 - from .common import make_hash_tests - return make_hash_tests(SHA224, "SHA224", test_data, - digest_size=28, - oid='\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x04') - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_SHA256.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_SHA256.py deleted file mode 100644 index 9ea9cc6..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_SHA256.py +++ /dev/null @@ -1,96 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/test_SHA256.py: Self-test for the SHA-256 hash function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Hash.SHA256""" - -__revision__ = "$Id$" - -import unittest -from Crypto.Util.py3compat import * - -class LargeSHA256Test(unittest.TestCase): - def runTest(self): - """SHA256: 512/520 MiB test""" - from Crypto.Hash import SHA256 - zeros = bchr(0x00) * (1024*1024) - - h = SHA256.new(zeros) - for i in range(511): - h.update(zeros) - - # This test vector is from PyCrypto's old testdata.py file. - self.assertEqual('9acca8e8c22201155389f65abbf6bc9723edc7384ead80503839f49dcc56d767', h.hexdigest()) # 512 MiB - - for i in range(8): - h.update(zeros) - - # This test vector is from PyCrypto's old testdata.py file. 
- self.assertEqual('abf51ad954b246009dfe5a50ecd582fd5b8f1b8b27f30393853c3ef721e7fa6e', h.hexdigest()) # 520 MiB - -def get_tests(config={}): - # Test vectors from FIPS PUB 180-2 - # This is a list of (expected_result, input[, description]) tuples. - test_data = [ - # FIPS PUB 180-2, B.1 - "One-Block Message" - ('ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad', - 'abc'), - - # FIPS PUB 180-2, B.2 - "Multi-Block Message" - ('248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1', - 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'), - - # FIPS PUB 180-2, B.3 - "Long Message" - ('cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0', - 'a' * 10**6, - '"a" * 10**6'), - - # Test for an old PyCrypto bug. - ('f7fd017a3c721ce7ff03f3552c0813adcc48b7f33f07e5e2ba71e23ea393d103', - 'This message is precisely 55 bytes long, to test a bug.', - 'Length = 55 (mod 64)'), - - # Example from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm - ('e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', ''), - - ('d32b568cd1b96d459e7291ebf4b25d007f275c9f13149beeb782fac0716613f8', - 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), - ] - - from Crypto.Hash import SHA256 - from .common import make_hash_tests - tests = make_hash_tests(SHA256, "SHA256", test_data, - digest_size=32, - oid="\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x01") - - if config.get('slow_tests'): - tests += [LargeSHA256Test()] - - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_SHA384.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_SHA384.py deleted file mode 100644 index 0c2f79c..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_SHA384.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/test_SHA.py: Self-test for the SHA-384 hash function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Hash.SHA384""" - -__revision__ = "$Id$" - -# Test vectors from various sources -# This is a list of (expected_result, input[, description]) tuples. 
-test_data = [ - - # RFC 4634: Section Page 8.4, "Test 1" - ('cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed8086072ba1e7cc2358baeca134c825a7', 'abc'), - - # RFC 4634: Section Page 8.4, "Test 2.2" - ('09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712fcc7c71a557e2db966c3e9fa91746039', 'abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu'), - - # RFC 4634: Section Page 8.4, "Test 3" - ('9d0e1809716474cb086e834e310a4a1ced149e9c00f248527972cec5704c2a5b07b8b3dc38ecc4ebae97ddd87f3d8985', 'a' * 10**6, "'a' * 10**6"), - - # Taken from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm - ('38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b', ''), - - # Example from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm - ('71e8383a4cea32d6fd6877495db2ee353542f46fa44bc23100bca48f3366b84e809f0708e81041f427c6d5219a286677', - 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), - -] - -def get_tests(config={}): - from Crypto.Hash import SHA384 - from .common import make_hash_tests - return make_hash_tests(SHA384, "SHA384", test_data, - digest_size=48, - oid='\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x02') - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_SHA512.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_SHA512.py deleted file mode 100644 index 31ace3d..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Hash/test_SHA512.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Hash/test_SHA512.py: Self-test for the SHA-512 hash function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Hash.SHA512""" - -__revision__ = "$Id$" - -# Test vectors from various sources -# This is a list of (expected_result, input[, description]) tuples. 
-test_data = [ - - # RFC 4634: Section Page 8.4, "Test 1" - ('ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f', 'abc'), - - # RFC 4634: Section Page 8.4, "Test 2.1" - ('8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909', 'abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu'), - - # RFC 4634: Section Page 8.4, "Test 3" - ('e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973ebde0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b', 'a' * 10**6, "'a' * 10**6"), - - # Taken from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm - ('cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e', ''), - - ('af9ed2de700433b803240a552b41b5a472a6ef3fe1431a722b2063c75e9f07451f67a28e37d09cde769424c96aea6f8971389db9e1993d6c565c3c71b855723c', 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), -] - -def get_tests(config={}): - from Crypto.Hash import SHA512 - from .common import make_hash_tests - return make_hash_tests(SHA512, "SHA512", test_data, - digest_size=64, - oid="\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x03") - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Protocol/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Protocol/__init__.py deleted file mode 100644 index a62c670..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Protocol/__init__.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Protocol/__init__.py: Self-tests for Crypto.Protocol -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test for Crypto.Protocol""" - -__revision__ = "$Id$" - -def get_tests(config={}): - tests = [] - from Crypto.SelfTest.Protocol import test_chaffing; tests += test_chaffing.get_tests(config=config) - from Crypto.SelfTest.Protocol import test_rfc1751; tests += test_rfc1751.get_tests(config=config) - from Crypto.SelfTest.Protocol import test_AllOrNothing; tests += test_AllOrNothing.get_tests(config=config) - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Protocol/test_AllOrNothing.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Protocol/test_AllOrNothing.py deleted file mode 100644 index a211eab..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Protocol/test_AllOrNothing.py +++ /dev/null @@ -1,76 +0,0 @@ -# -# Test script for Crypto.Protocol.AllOrNothing -# -# Part of the Python Cryptography Toolkit -# -# Written by Andrew Kuchling and others -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -__revision__ = "$Id$" - -import unittest -from Crypto.Protocol import AllOrNothing -from Crypto.Util.py3compat import * - -text = b("""\ -When in the Course of human events, it becomes necessary for one people to -dissolve the political bands which have connected them with another, and to -assume among the powers of the earth, the separate and equal station to which -the Laws of Nature and of Nature's God entitle them, a decent respect to the -opinions of mankind requires that they should declare the causes which impel -them to the separation. - -We hold these truths to be self-evident, that all men are created equal, that -they are endowed by their Creator with certain unalienable Rights, that among -these are Life, Liberty, and the pursuit of Happiness. That to secure these -rights, Governments are instituted among Men, deriving their just powers from -the consent of the governed. That whenever any Form of Government becomes -destructive of these ends, it is the Right of the People to alter or to -abolish it, and to institute new Government, laying its foundation on such -principles and organizing its powers in such form, as to them shall seem most -likely to effect their Safety and Happiness. 
-""") - -class AllOrNothingTest (unittest.TestCase): - - def runTest(self): - "Simple test of AllOrNothing" - - from Crypto.Cipher import AES - import base64 - - # The current AllOrNothing will fail - # every so often. Repeat the test - # several times to force this. - for i in range(50): - x = AllOrNothing.AllOrNothing(AES) - - msgblocks = x.digest(text) - - # get a new undigest-only object so there's no leakage - y = AllOrNothing.AllOrNothing(AES) - text2 = y.undigest(msgblocks) - self.assertEqual(text, text2) - -def get_tests(config={}): - return [AllOrNothingTest()] - -if __name__ == "__main__": - unittest.main() diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Protocol/test_KDF.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Protocol/test_KDF.py deleted file mode 100644 index d3e3be9..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Protocol/test_KDF.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Protocol/test_KDF.py: Self-test for key derivation functions -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -__revision__ = "$Id$" - -import unittest -from binascii import unhexlify - -from Crypto.SelfTest.st_common import list_test_cases -from Crypto.Hash import SHA as SHA1,HMAC - -from Crypto.Protocol.KDF import * - -def t2b(t): return unhexlify(b(t)) - -class PBKDF1_Tests(unittest.TestCase): - - # List of tuples with test data. - # Each tuple is made up by: - # Item #0: a pass phrase - # Item #1: salt (8 bytes encoded in hex) - # Item #2: output key length - # Item #3: iterations to use - # Item #4: expected result (encoded in hex) - _testData = ( - # From http://www.di-mgt.com.au/cryptoKDFs.html#examplespbkdf - ("password","78578E5A5D63CB06",16,1000,"DC19847E05C64D2FAF10EBFB4A3D2A20"), - ) - - def test1(self): - v = self._testData[0] - res = PBKDF1(v[0], t2b(v[1]), v[2], v[3], SHA1) - self.assertEqual(res, t2b(v[4])) - -class PBKDF2_Tests(unittest.TestCase): - - # List of tuples with test data. 
- # Each tuple is made up by: - # Item #0: a pass phrase - # Item #1: salt (encoded in hex) - # Item #2: output key length - # Item #3: iterations to use - # Item #4: expected result (encoded in hex) - _testData = ( - # From http://www.di-mgt.com.au/cryptoKDFs.html#examplespbkdf - ("password","78578E5A5D63CB06",24,2048,"BFDE6BE94DF7E11DD409BCE20A0255EC327CB936FFE93643"), - # From RFC 6050 - ("password","73616c74", 20, 1, "0c60c80f961f0e71f3a9b524af6012062fe037a6"), - ("password","73616c74", 20, 2, "ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957"), - ("password","73616c74", 20, 4096, "4b007901b765489abead49d926f721d065a429c1"), - ("passwordPASSWORDpassword","73616c7453414c5473616c7453414c5473616c7453414c5473616c7453414c5473616c74", - 25, 4096, "3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038"), - ( 'pass\x00word',"7361006c74",16,4096, "56fa6aa75548099dcc37d7f03425e0c3"), - ) - - def test1(self): - # Test only for HMAC-SHA1 as PRF - - def prf(p,s): - return HMAC.new(p,s,SHA1).digest() - - for i in range(len(self._testData)): - v = self._testData[i] - res = PBKDF2(v[0], t2b(v[1]), v[2], v[3]) - res2 = PBKDF2(v[0], t2b(v[1]), v[2], v[3], prf) - self.assertEqual(res, t2b(v[4])) - self.assertEqual(res, res2) - -def get_tests(config={}): - tests = [] - tests += list_test_cases(PBKDF1_Tests) - tests += list_test_cases(PBKDF2_Tests) - return tests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Protocol/test_chaffing.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Protocol/test_chaffing.py deleted file mode 100644 index 5fa0120..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Protocol/test_chaffing.py +++ /dev/null @@ -1,74 +0,0 @@ -# -# Test script for Crypto.Protocol.Chaffing -# -# Part of the Python Cryptography Toolkit -# -# Written by Andrew Kuchling and others -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -__revision__ = "$Id$" - -import unittest -from Crypto.Protocol import Chaffing - -text = """\ -When in the Course of human events, it becomes necessary for one people to -dissolve the political bands which have connected them with another, and to -assume among the powers of the earth, the separate and equal station to which -the Laws of Nature and of Nature's God entitle them, a decent respect to the -opinions of mankind requires that they should declare the causes which impel -them to the separation. 
- -We hold these truths to be self-evident, that all men are created equal, that -they are endowed by their Creator with certain unalienable Rights, that among -these are Life, Liberty, and the pursuit of Happiness. That to secure these -rights, Governments are instituted among Men, deriving their just powers from -the consent of the governed. That whenever any Form of Government becomes -destructive of these ends, it is the Right of the People to alter or to -abolish it, and to institute new Government, laying its foundation on such -principles and organizing its powers in such form, as to them shall seem most -likely to effect their Safety and Happiness. -""" - -class ChaffingTest (unittest.TestCase): - - def runTest(self): - "Simple tests of chaffing and winnowing" - # Test constructors - Chaffing.Chaff() - Chaffing.Chaff(0.5, 1) - self.assertRaises(ValueError, Chaffing.Chaff, factor=-1) - self.assertRaises(ValueError, Chaffing.Chaff, blocksper=-1) - - data = [(1, 'data1', 'data1'), (2, 'data2', 'data2')] - c = Chaffing.Chaff(1.0, 1) - c.chaff(data) - chaff = c.chaff(data) - self.assertEqual(len(chaff), 4) - - c = Chaffing.Chaff(0.0, 1) - chaff = c.chaff(data) - self.assertEqual(len(chaff), 2) - -def get_tests(config={}): - return [ChaffingTest()] - -if __name__ == "__main__": - unittest.main() diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Protocol/test_rfc1751.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Protocol/test_rfc1751.py deleted file mode 100644 index 0878cc5..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Protocol/test_rfc1751.py +++ /dev/null @@ -1,62 +0,0 @@ -# -# Test script for Crypto.Util.RFC1751. -# -# Part of the Python Cryptography Toolkit -# -# Written by Andrew Kuchling and others -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -__revision__ = "$Id$" - -import binascii -import unittest -from Crypto.Util import RFC1751 -from Crypto.Util.py3compat import * - -test_data = [('EB33F77EE73D4053', 'TIDE ITCH SLOW REIN RULE MOT'), - ('CCAC2AED591056BE4F90FD441C534766', - 'RASH BUSH MILK LOOK BAD BRIM AVID GAFF BAIT ROT POD LOVE'), - ('EFF81F9BFBC65350920CDD7416DE8009', - 'TROD MUTE TAIL WARM CHAR KONG HAAG CITY BORE O TEAL AWL') - ] - -class RFC1751Test_k2e (unittest.TestCase): - - def runTest (self): - "Check converting keys to English" - for key, words in test_data: - key=binascii.a2b_hex(b(key)) - self.assertEqual(RFC1751.key_to_english(key), words) - -class RFC1751Test_e2k (unittest.TestCase): - - def runTest (self): - "Check converting English strings to keys" - for key, words in test_data: - key=binascii.a2b_hex(b(key)) - self.assertEqual(RFC1751.english_to_key(words), key) - -# class RFC1751Test - -def get_tests(config={}): - return [RFC1751Test_k2e(), RFC1751Test_e2k()] - -if __name__ == "__main__": - unittest.main() diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/PublicKey/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/PublicKey/__init__.py deleted file mode 100644 index 61ba53f..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/PublicKey/__init__.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/PublicKey/__init__.py: Self-test for public key crypto -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test for public-key crypto""" - -__revision__ = "$Id$" - -import os - -def get_tests(config={}): - tests = [] - from Crypto.SelfTest.PublicKey import test_DSA; tests += test_DSA.get_tests(config=config) - from Crypto.SelfTest.PublicKey import test_RSA; tests += test_RSA.get_tests(config=config) - from Crypto.SelfTest.PublicKey import test_importKey; tests += test_importKey.get_tests(config=config) - from Crypto.SelfTest.PublicKey import test_ElGamal; tests += test_ElGamal.get_tests(config=config) - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/PublicKey/test_DSA.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/PublicKey/test_DSA.py deleted file mode 100644 index b05f69a..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/PublicKey/test_DSA.py +++ /dev/null @@ -1,244 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/PublicKey/test_DSA.py: Self-test for the DSA primitive -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.PublicKey.DSA""" - -__revision__ = "$Id$" - -import sys -import os -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * -from Crypto.Util.py3compat import * - -import unittest -from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex - -def _sws(s): - """Remove whitespace from a text or byte string""" - if isinstance(s,str): - return "".join(s.split()) - else: - return b("").join(s.split()) - -class DSATest(unittest.TestCase): - # Test vector from "Appendix 5. Example of the DSA" of - # "Digital Signature Standard (DSS)", - # U.S. Department of Commerce/National Institute of Standards and Technology - # FIPS 186-2 (+Change Notice), 2000 January 27. 
- # http://csrc.nist.gov/publications/fips/fips186-2/fips186-2-change1.pdf - - y = _sws("""19131871 d75b1612 a819f29d 78d1b0d7 346f7aa7 7bb62a85 - 9bfd6c56 75da9d21 2d3a36ef 1672ef66 0b8c7c25 5cc0ec74 - 858fba33 f44c0669 9630a76b 030ee333""") - - g = _sws("""626d0278 39ea0a13 413163a5 5b4cb500 299d5522 956cefcb - 3bff10f3 99ce2c2e 71cb9de5 fa24babf 58e5b795 21925c9c - c42e9f6f 464b088c c572af53 e6d78802""") - - p = _sws("""8df2a494 492276aa 3d25759b b06869cb eac0d83a fb8d0cf7 - cbb8324f 0d7882e5 d0762fc5 b7210eaf c2e9adac 32ab7aac - 49693dfb f83724c2 ec0736ee 31c80291""") - - q = _sws("""c773218c 737ec8ee 993b4f2d ed30f48e dace915f""") - - x = _sws("""2070b322 3dba372f de1c0ffc 7b2e3b49 8b260614""") - - k = _sws("""358dad57 1462710f 50e254cf 1a376b2b deaadfbf""") - k_inverse = _sws("""0d516729 8202e49b 4116ac10 4fc3f415 ae52f917""") - m = b2a_hex(b("abc")) - m_hash = _sws("""a9993e36 4706816a ba3e2571 7850c26c 9cd0d89d""") - r = _sws("""8bac1ab6 6410435c b7181f95 b16ab97c 92b341c0""") - s = _sws("""41e2345f 1f56df24 58f426d1 55b4ba2d b6dcd8c8""") - - def setUp(self): - global DSA, Random, bytes_to_long, size - from Crypto.PublicKey import DSA - from Crypto import Random - from Crypto.Util.number import bytes_to_long, inverse, size - - self.dsa = DSA - - def test_generate_1arg(self): - """DSA (default implementation) generated key (1 argument)""" - dsaObj = self.dsa.generate(1024) - self._check_private_key(dsaObj) - pub = dsaObj.publickey() - self._check_public_key(pub) - - def test_generate_2arg(self): - """DSA (default implementation) generated key (2 arguments)""" - dsaObj = self.dsa.generate(1024, Random.new().read) - self._check_private_key(dsaObj) - pub = dsaObj.publickey() - self._check_public_key(pub) - - def test_construct_4tuple(self): - """DSA (default implementation) constructed key (4-tuple)""" - (y, g, p, q) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q)] - dsaObj = self.dsa.construct((y, g, p, q)) - self._test_verification(dsaObj) - - def test_construct_5tuple(self): - """DSA (default implementation) constructed key (5-tuple)""" - (y, g, p, q, x) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q, self.x)] - dsaObj = self.dsa.construct((y, g, p, q, x)) - self._test_signing(dsaObj) - self._test_verification(dsaObj) - - def _check_private_key(self, dsaObj): - # Check capabilities - self.assertEqual(1, dsaObj.has_private()) - self.assertEqual(1, dsaObj.can_sign()) - self.assertEqual(0, dsaObj.can_encrypt()) - self.assertEqual(0, dsaObj.can_blind()) - - # Check dsaObj.[ygpqx] -> dsaObj.key.[ygpqx] mapping - self.assertEqual(dsaObj.y, dsaObj.key.y) - self.assertEqual(dsaObj.g, dsaObj.key.g) - self.assertEqual(dsaObj.p, dsaObj.key.p) - self.assertEqual(dsaObj.q, dsaObj.key.q) - self.assertEqual(dsaObj.x, dsaObj.key.x) - - # Sanity check key data - self.assertEqual(1, dsaObj.p > dsaObj.q) # p > q - self.assertEqual(160, size(dsaObj.q)) # size(q) == 160 bits - self.assertEqual(0, (dsaObj.p - 1) % dsaObj.q) # q is a divisor of p-1 - self.assertEqual(dsaObj.y, pow(dsaObj.g, dsaObj.x, dsaObj.p)) # y == g**x mod p - self.assertEqual(1, 0 < dsaObj.x < dsaObj.q) # 0 < x < q - - def _check_public_key(self, dsaObj): - k = a2b_hex(self.k) - m_hash = a2b_hex(self.m_hash) - - # Check capabilities - self.assertEqual(0, dsaObj.has_private()) - self.assertEqual(1, dsaObj.can_sign()) - self.assertEqual(0, dsaObj.can_encrypt()) - self.assertEqual(0, dsaObj.can_blind()) - - # Check dsaObj.[ygpq] -> dsaObj.key.[ygpq] mapping - 
self.assertEqual(dsaObj.y, dsaObj.key.y) - self.assertEqual(dsaObj.g, dsaObj.key.g) - self.assertEqual(dsaObj.p, dsaObj.key.p) - self.assertEqual(dsaObj.q, dsaObj.key.q) - - # Check that private parameters are all missing - self.assertEqual(0, hasattr(dsaObj, 'x')) - self.assertEqual(0, hasattr(dsaObj.key, 'x')) - - # Sanity check key data - self.assertEqual(1, dsaObj.p > dsaObj.q) # p > q - self.assertEqual(160, size(dsaObj.q)) # size(q) == 160 bits - self.assertEqual(0, (dsaObj.p - 1) % dsaObj.q) # q is a divisor of p-1 - - # Public-only key objects should raise an error when .sign() is called - self.assertRaises(TypeError, dsaObj.sign, m_hash, k) - - # Check __eq__ and __ne__ - self.assertEqual(dsaObj.publickey() == dsaObj.publickey(),True) # assert_ - self.assertEqual(dsaObj.publickey() != dsaObj.publickey(),False) # failIf - - def _test_signing(self, dsaObj): - k = a2b_hex(self.k) - m_hash = a2b_hex(self.m_hash) - r = bytes_to_long(a2b_hex(self.r)) - s = bytes_to_long(a2b_hex(self.s)) - (r_out, s_out) = dsaObj.sign(m_hash, k) - self.assertEqual((r, s), (r_out, s_out)) - - def _test_verification(self, dsaObj): - m_hash = a2b_hex(self.m_hash) - r = bytes_to_long(a2b_hex(self.r)) - s = bytes_to_long(a2b_hex(self.s)) - self.assertEqual(1, dsaObj.verify(m_hash, (r, s))) - self.assertEqual(0, dsaObj.verify(m_hash + b("\0"), (r, s))) - -class DSAFastMathTest(DSATest): - def setUp(self): - DSATest.setUp(self) - self.dsa = DSA.DSAImplementation(use_fast_math=True) - - def test_generate_1arg(self): - """DSA (_fastmath implementation) generated key (1 argument)""" - DSATest.test_generate_1arg(self) - - def test_generate_2arg(self): - """DSA (_fastmath implementation) generated key (2 arguments)""" - DSATest.test_generate_2arg(self) - - def test_construct_4tuple(self): - """DSA (_fastmath implementation) constructed key (4-tuple)""" - DSATest.test_construct_4tuple(self) - - def test_construct_5tuple(self): - """DSA (_fastmath implementation) constructed key (5-tuple)""" - DSATest.test_construct_5tuple(self) - -class DSASlowMathTest(DSATest): - def setUp(self): - DSATest.setUp(self) - self.dsa = DSA.DSAImplementation(use_fast_math=False) - - def test_generate_1arg(self): - """DSA (_slowmath implementation) generated key (1 argument)""" - DSATest.test_generate_1arg(self) - - def test_generate_2arg(self): - """DSA (_slowmath implementation) generated key (2 arguments)""" - DSATest.test_generate_2arg(self) - - def test_construct_4tuple(self): - """DSA (_slowmath implementation) constructed key (4-tuple)""" - DSATest.test_construct_4tuple(self) - - def test_construct_5tuple(self): - """DSA (_slowmath implementation) constructed key (5-tuple)""" - DSATest.test_construct_5tuple(self) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(DSATest) - try: - from Crypto.PublicKey import _fastmath - tests += list_test_cases(DSAFastMathTest) - except ImportError: - from distutils.sysconfig import get_config_var - import inspect - _fm_path = os.path.normpath(os.path.dirname(os.path.abspath( - inspect.getfile(inspect.currentframe()))) - +"/../../PublicKey/_fastmath"+get_config_var("SO")) - if os.path.exists(_fm_path): - raise ImportError("While the _fastmath module exists, importing "+ - "it failed. This may point to the gmp or mpir shared library "+ - "not being in the path. 
_fastmath was found at "+_fm_path) - tests += list_test_cases(DSASlowMathTest) - return tests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/PublicKey/test_ElGamal.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/PublicKey/test_ElGamal.py deleted file mode 100644 index cebab30..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/PublicKey/test_ElGamal.py +++ /dev/null @@ -1,210 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/PublicKey/test_ElGamal.py: Self-test for the ElGamal primitive -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.PublicKey.ElGamal""" - -__revision__ = "$Id$" - -import unittest -from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex -from Crypto import Random -from Crypto.PublicKey import ElGamal -from Crypto.Util.number import * -from Crypto.Util.py3compat import * - -class ElGamalTest(unittest.TestCase): - - # - # Test vectors - # - # There seem to be no real ElGamal test vectors available in the - # public domain. The following test vectors have been generated - # with libgcrypt 1.5.0. 
- # - # Encryption - tve=[ - { - # 256 bits - 'p' :'BA4CAEAAED8CBE952AFD2126C63EB3B345D65C2A0A73D2A3AD4138B6D09BD933', - 'g' :'05', - 'y' :'60D063600ECED7C7C55146020E7A31C4476E9793BEAED420FEC9E77604CAE4EF', - 'x' :'1D391BA2EE3C37FE1BA175A69B2C73A11238AD77675932', - 'k' :'F5893C5BAB4131264066F57AB3D8AD89E391A0B68A68A1', - 'pt' :'48656C6C6F207468657265', - 'ct1':'32BFD5F487966CEA9E9356715788C491EC515E4ED48B58F0F00971E93AAA5EC7', - 'ct2':'7BE8FBFF317C93E82FCEF9BD515284BA506603FEA25D01C0CB874A31F315EE68' - }, - - { - # 512 bits - 'p' :'F1B18AE9F7B4E08FDA9A04832F4E919D89462FD31BF12F92791A93519F75076D6CE3942689CDFF2F344CAFF0F82D01864F69F3AECF566C774CBACF728B81A227', - 'g' :'07', - 'y' :'688628C676E4F05D630E1BE39D0066178CA7AA83836B645DE5ADD359B4825A12B02EF4252E4E6FA9BEC1DB0BE90F6D7C8629CABB6E531F472B2664868156E20C', - 'x' :'14E60B1BDFD33436C0DA8A22FDC14A2CCDBBED0627CE68', - 'k' :'38DBF14E1F319BDA9BAB33EEEADCAF6B2EA5250577ACE7', - 'pt' :'48656C6C6F207468657265', - 'ct1':'290F8530C2CC312EC46178724F196F308AD4C523CEABB001FACB0506BFED676083FE0F27AC688B5C749AB3CB8A80CD6F7094DBA421FB19442F5A413E06A9772B', - 'ct2':'1D69AAAD1DC50493FB1B8E8721D621D683F3BF1321BE21BC4A43E11B40C9D4D9C80DE3AAC2AB60D31782B16B61112E68220889D53C4C3136EE6F6CE61F8A23A0' - } - ] - - # Signature - tvs=[ - { - # 256 bits - 'p' :'D2F3C41EA66530838A704A48FFAC9334F4701ECE3A97CEE4C69DD01AE7129DD7', - 'g' :'05', - 'y' :'C3F9417DC0DAFEA6A05C1D2333B7A95E63B3F4F28CC962254B3256984D1012E7', - 'x' :'165E4A39BE44D5A2D8B1332D416BC559616F536BC735BB', - 'k' :'C7F0C794A7EAD726E25A47FF8928013680E73C51DD3D7D99BFDA8F492585928F', - 'h' :'48656C6C6F207468657265', - 'sig1':'35CA98133779E2073EF31165AFCDEB764DD54E96ADE851715495F9C635E1E7C2', - 'sig2':'0135B88B1151279FE5D8078D4FC685EE81177EE9802AB123A73925FC1CB059A7', - }, - { - # 512 bits - 'p' :'E24CF3A4B8A6AF749DCA6D714282FE4AABEEE44A53BB6ED15FBE32B5D3C3EF9CC4124A2ECA331F3C1C1B667ACA3766825217E7B5F9856648D95F05330C6A19CF', - 'g' :'0B', - 'y' :'2AD3A1049CA5D4ED207B2431C79A8719BB4073D4A94E450EA6CEE8A760EB07ADB67C0D52C275EE85D7B52789061EE45F2F37D9B2AE522A51C28329766BFE68AC', - 'x' :'16CBB4F46D9ECCF24FF9F7E63CAA3BD8936341555062AB', - 'k' :'8A3D89A4E429FD2476D7D717251FB79BF900FFE77444E6BB8299DC3F84D0DD57ABAB50732AE158EA52F5B9E7D8813E81FD9F79470AE22F8F1CF9AEC820A78C69', - 'h' :'48656C6C6F207468657265', - 'sig1':'BE001AABAFFF976EC9016198FBFEA14CBEF96B000CCC0063D3324016F9E91FE80D8F9325812ED24DDB2B4D4CF4430B169880B3CE88313B53255BD4EC0378586F', - 'sig2':'5E266F3F837BA204E3BBB6DBECC0611429D96F8C7CE8F4EFDF9D4CB681C2A954468A357BF4242CEC7418B51DFC081BCD21299EF5B5A0DDEF3A139A1817503DDE', - } - ] - - def test_generate_128(self): - self._test_random_key(128) - - def test_generate_512(self): - self._test_random_key(512) - - def test_encryption(self): - for tv in self.tve: - for as_longs in (0,1): - d = self.convert_tv(tv, as_longs) - key = ElGamal.construct(d['key']) - ct = key.encrypt(d['pt'], d['k']) - self.assertEqual(ct[0], d['ct1']) - self.assertEqual(ct[1], d['ct2']) - - def test_decryption(self): - for tv in self.tve: - for as_longs in (0,1): - d = self.convert_tv(tv, as_longs) - key = ElGamal.construct(d['key']) - pt = key.decrypt((d['ct1'], d['ct2'])) - self.assertEqual(pt, d['pt']) - - def test_signing(self): - for tv in self.tvs: - for as_longs in (0,1): - d = self.convert_tv(tv, as_longs) - key = ElGamal.construct(d['key']) - sig1, sig2 = key.sign(d['h'], d['k']) - self.assertEqual(sig1, d['sig1']) - self.assertEqual(sig2, d['sig2']) - - def test_verification(self): - for tv in self.tvs: - for as_longs in (0,1): 
- d = self.convert_tv(tv, as_longs) - key = ElGamal.construct(d['key']) - # Positive test - res = key.verify( d['h'], (d['sig1'],d['sig2']) ) - self.assertTrue(res) - # Negative test - res = key.verify( d['h'], (d['sig1']+1,d['sig2']) ) - self.assertFalse(res) - - def convert_tv(self, tv, as_longs=0): - """Convert a test vector from textual form (hexadecimal ascii - to either integers or byte strings.""" - key_comps = 'p','g','y','x' - tv2 = {} - for c in list(tv.keys()): - tv2[c] = a2b_hex(tv[c]) - if as_longs or c in key_comps or c in ('sig1','sig2'): - tv2[c] = bytes_to_long(tv2[c]) - tv2['key']=[] - for c in key_comps: - tv2['key'] += [tv2[c]] - del tv2[c] - return tv2 - - def _test_random_key(self, bits): - elgObj = ElGamal.generate(bits, Random.new().read) - self._check_private_key(elgObj) - self._exercise_primitive(elgObj) - pub = elgObj.publickey() - self._check_public_key(pub) - self._exercise_public_primitive(elgObj) - - def _check_private_key(self, elgObj): - - # Check capabilities - self.assertTrue(elgObj.has_private()) - self.assertTrue(elgObj.can_sign()) - self.assertTrue(elgObj.can_encrypt()) - - # Sanity check key data - self.assertTrue(1 -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.PublicKey.RSA""" - -__revision__ = "$Id$" - -import sys -import os -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * -from Crypto.Util.py3compat import * - -import unittest -from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex - -class RSATest(unittest.TestCase): - # Test vectors from "RSA-OAEP and RSA-PSS test vectors (.zip file)" - # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip - # See RSADSI's PKCS#1 page at - # http://www.rsa.com/rsalabs/node.asp?id=2125 - - # from oaep-int.txt - - # TODO: PyCrypto treats the message as starting *after* the leading "00" - # TODO: That behaviour should probably be changed in the future. 
- plaintext = """ - eb 7a 19 ac e9 e3 00 63 50 e3 29 50 4b 45 e2 - ca 82 31 0b 26 dc d8 7d 5c 68 f1 ee a8 f5 52 67 - c3 1b 2e 8b b4 25 1f 84 d7 e0 b2 c0 46 26 f5 af - f9 3e dc fb 25 c9 c2 b3 ff 8a e1 0e 83 9a 2d db - 4c dc fe 4f f4 77 28 b4 a1 b7 c1 36 2b aa d2 9a - b4 8d 28 69 d5 02 41 21 43 58 11 59 1b e3 92 f9 - 82 fb 3e 87 d0 95 ae b4 04 48 db 97 2f 3a c1 4f - 7b c2 75 19 52 81 ce 32 d2 f1 b7 6d 4d 35 3e 2d - """ - - ciphertext = """ - 12 53 e0 4d c0 a5 39 7b b4 4a 7a b8 7e 9b f2 a0 - 39 a3 3d 1e 99 6f c8 2a 94 cc d3 00 74 c9 5d f7 - 63 72 20 17 06 9e 52 68 da 5d 1c 0b 4f 87 2c f6 - 53 c1 1d f8 23 14 a6 79 68 df ea e2 8d ef 04 bb - 6d 84 b1 c3 1d 65 4a 19 70 e5 78 3b d6 eb 96 a0 - 24 c2 ca 2f 4a 90 fe 9f 2e f5 c9 c1 40 e5 bb 48 - da 95 36 ad 87 00 c8 4f c9 13 0a de a7 4e 55 8d - 51 a7 4d df 85 d8 b5 0d e9 68 38 d6 06 3e 09 55 - """ - - modulus = """ - bb f8 2f 09 06 82 ce 9c 23 38 ac 2b 9d a8 71 f7 - 36 8d 07 ee d4 10 43 a4 40 d6 b6 f0 74 54 f5 1f - b8 df ba af 03 5c 02 ab 61 ea 48 ce eb 6f cd 48 - 76 ed 52 0d 60 e1 ec 46 19 71 9d 8a 5b 8b 80 7f - af b8 e0 a3 df c7 37 72 3e e6 b4 b7 d9 3a 25 84 - ee 6a 64 9d 06 09 53 74 88 34 b2 45 45 98 39 4e - e0 aa b1 2d 7b 61 a5 1f 52 7a 9a 41 f6 c1 68 7f - e2 53 72 98 ca 2a 8f 59 46 f8 e5 fd 09 1d bd cb - """ - - e = 0x11 # public exponent - - prime_factor = """ - c9 7f b1 f0 27 f4 53 f6 34 12 33 ea aa d1 d9 35 - 3f 6c 42 d0 88 66 b1 d0 5a 0f 20 35 02 8b 9d 86 - 98 40 b4 16 66 b4 2e 92 ea 0d a3 b4 32 04 b5 cf - ce 33 52 52 4d 04 16 a5 a4 41 e7 00 af 46 15 03 - """ - - def setUp(self): - global RSA, Random, bytes_to_long - from Crypto.PublicKey import RSA - from Crypto import Random - from Crypto.Util.number import bytes_to_long, inverse - self.n = bytes_to_long(a2b_hex(self.modulus)) - self.p = bytes_to_long(a2b_hex(self.prime_factor)) - - # Compute q, d, and u from n, e, and p - self.q = divmod(self.n, self.p)[0] - self.d = inverse(self.e, (self.p-1)*(self.q-1)) - self.u = inverse(self.p, self.q) # u = e**-1 (mod q) - - self.rsa = RSA - - def test_generate_1arg(self): - """RSA (default implementation) generated key (1 argument)""" - rsaObj = self.rsa.generate(1024) - self._check_private_key(rsaObj) - self._exercise_primitive(rsaObj) - pub = rsaObj.publickey() - self._check_public_key(pub) - self._exercise_public_primitive(rsaObj) - - def test_generate_2arg(self): - """RSA (default implementation) generated key (2 arguments)""" - rsaObj = self.rsa.generate(1024, Random.new().read) - self._check_private_key(rsaObj) - self._exercise_primitive(rsaObj) - pub = rsaObj.publickey() - self._check_public_key(pub) - self._exercise_public_primitive(rsaObj) - - def test_generate_3args(self): - rsaObj = self.rsa.generate(1024, Random.new().read,e=65537) - self._check_private_key(rsaObj) - self._exercise_primitive(rsaObj) - pub = rsaObj.publickey() - self._check_public_key(pub) - self._exercise_public_primitive(rsaObj) - self.assertEqual(65537,rsaObj.e) - - def test_construct_2tuple(self): - """RSA (default implementation) constructed key (2-tuple)""" - pub = self.rsa.construct((self.n, self.e)) - self._check_public_key(pub) - self._check_encryption(pub) - self._check_verification(pub) - - def test_construct_3tuple(self): - """RSA (default implementation) constructed key (3-tuple)""" - rsaObj = self.rsa.construct((self.n, self.e, self.d)) - self._check_encryption(rsaObj) - self._check_decryption(rsaObj) - self._check_signing(rsaObj) - self._check_verification(rsaObj) - - def test_construct_4tuple(self): - """RSA (default implementation) constructed key 
(4-tuple)""" - rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p)) - self._check_encryption(rsaObj) - self._check_decryption(rsaObj) - self._check_signing(rsaObj) - self._check_verification(rsaObj) - - def test_construct_5tuple(self): - """RSA (default implementation) constructed key (5-tuple)""" - rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p, self.q)) - self._check_private_key(rsaObj) - self._check_encryption(rsaObj) - self._check_decryption(rsaObj) - self._check_signing(rsaObj) - self._check_verification(rsaObj) - - def test_construct_6tuple(self): - """RSA (default implementation) constructed key (6-tuple)""" - rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p, self.q, self.u)) - self._check_private_key(rsaObj) - self._check_encryption(rsaObj) - self._check_decryption(rsaObj) - self._check_signing(rsaObj) - self._check_verification(rsaObj) - - def test_factoring(self): - rsaObj = self.rsa.construct([self.n, self.e, self.d]) - self.assertTrue(rsaObj.p==self.p or rsaObj.p==self.q) - self.assertTrue(rsaObj.q==self.p or rsaObj.q==self.q) - self.assertTrue(rsaObj.q*rsaObj.p == self.n) - - self.assertRaises(ValueError, self.rsa.construct, [self.n, self.e, self.n-1]) - - def _check_private_key(self, rsaObj): - # Check capabilities - self.assertEqual(1, rsaObj.has_private()) - self.assertEqual(1, rsaObj.can_sign()) - self.assertEqual(1, rsaObj.can_encrypt()) - self.assertEqual(1, rsaObj.can_blind()) - - # Check rsaObj.[nedpqu] -> rsaObj.key.[nedpqu] mapping - self.assertEqual(rsaObj.n, rsaObj.key.n) - self.assertEqual(rsaObj.e, rsaObj.key.e) - self.assertEqual(rsaObj.d, rsaObj.key.d) - self.assertEqual(rsaObj.p, rsaObj.key.p) - self.assertEqual(rsaObj.q, rsaObj.key.q) - self.assertEqual(rsaObj.u, rsaObj.key.u) - - # Sanity check key data - self.assertEqual(rsaObj.n, rsaObj.p * rsaObj.q) # n = pq - self.assertEqual(1, rsaObj.d * rsaObj.e % ((rsaObj.p-1) * (rsaObj.q-1))) # ed = 1 (mod (p-1)(q-1)) - self.assertEqual(1, rsaObj.p * rsaObj.u % rsaObj.q) # pu = 1 (mod q) - self.assertEqual(1, rsaObj.p > 1) # p > 1 - self.assertEqual(1, rsaObj.q > 1) # q > 1 - self.assertEqual(1, rsaObj.e > 1) # e > 1 - self.assertEqual(1, rsaObj.d > 1) # d > 1 - - def _check_public_key(self, rsaObj): - ciphertext = a2b_hex(self.ciphertext) - - # Check capabilities - self.assertEqual(0, rsaObj.has_private()) - self.assertEqual(1, rsaObj.can_sign()) - self.assertEqual(1, rsaObj.can_encrypt()) - self.assertEqual(1, rsaObj.can_blind()) - - # Check rsaObj.[ne] -> rsaObj.key.[ne] mapping - self.assertEqual(rsaObj.n, rsaObj.key.n) - self.assertEqual(rsaObj.e, rsaObj.key.e) - - # Check that private parameters are all missing - self.assertEqual(0, hasattr(rsaObj, 'd')) - self.assertEqual(0, hasattr(rsaObj, 'p')) - self.assertEqual(0, hasattr(rsaObj, 'q')) - self.assertEqual(0, hasattr(rsaObj, 'u')) - self.assertEqual(0, hasattr(rsaObj.key, 'd')) - self.assertEqual(0, hasattr(rsaObj.key, 'p')) - self.assertEqual(0, hasattr(rsaObj.key, 'q')) - self.assertEqual(0, hasattr(rsaObj.key, 'u')) - - # Sanity check key data - self.assertEqual(1, rsaObj.e > 1) # e > 1 - - # Public keys should not be able to sign or decrypt - self.assertRaises(TypeError, rsaObj.sign, ciphertext, b("")) - self.assertRaises(TypeError, rsaObj.decrypt, ciphertext) - - # Check __eq__ and __ne__ - self.assertEqual(rsaObj.publickey() == rsaObj.publickey(),True) # assert_ - self.assertEqual(rsaObj.publickey() != rsaObj.publickey(),False) # failIf - - def _exercise_primitive(self, rsaObj): - # Since we're using a 
randomly-generated key, we can't check the test - # vector, but we can make sure encryption and decryption are inverse - # operations. - ciphertext = a2b_hex(self.ciphertext) - - # Test decryption - plaintext = rsaObj.decrypt((ciphertext,)) - - # Test encryption (2 arguments) - (new_ciphertext2,) = rsaObj.encrypt(plaintext, b("")) - self.assertEqual(b2a_hex(ciphertext), b2a_hex(new_ciphertext2)) - - # Test blinded decryption - blinding_factor = Random.new().read(len(ciphertext)-1) - blinded_ctext = rsaObj.blind(ciphertext, blinding_factor) - blinded_ptext = rsaObj.decrypt((blinded_ctext,)) - unblinded_plaintext = rsaObj.unblind(blinded_ptext, blinding_factor) - self.assertEqual(b2a_hex(plaintext), b2a_hex(unblinded_plaintext)) - - # Test signing (2 arguments) - signature2 = rsaObj.sign(ciphertext, b("")) - self.assertEqual((bytes_to_long(plaintext),), signature2) - - # Test verification - self.assertEqual(1, rsaObj.verify(ciphertext, (bytes_to_long(plaintext),))) - - def _exercise_public_primitive(self, rsaObj): - plaintext = a2b_hex(self.plaintext) - - # Test encryption (2 arguments) - (new_ciphertext2,) = rsaObj.encrypt(plaintext, b("")) - - # Exercise verification - rsaObj.verify(new_ciphertext2, (bytes_to_long(plaintext),)) - - def _check_encryption(self, rsaObj): - plaintext = a2b_hex(self.plaintext) - ciphertext = a2b_hex(self.ciphertext) - - # Test encryption (2 arguments) - (new_ciphertext2,) = rsaObj.encrypt(plaintext, b("")) - self.assertEqual(b2a_hex(ciphertext), b2a_hex(new_ciphertext2)) - - def _check_decryption(self, rsaObj): - plaintext = a2b_hex(self.plaintext) - ciphertext = a2b_hex(self.ciphertext) - - # Test plain decryption - new_plaintext = rsaObj.decrypt((ciphertext,)) - self.assertEqual(b2a_hex(plaintext), b2a_hex(new_plaintext)) - - # Test blinded decryption - blinding_factor = Random.new().read(len(ciphertext)-1) - blinded_ctext = rsaObj.blind(ciphertext, blinding_factor) - blinded_ptext = rsaObj.decrypt((blinded_ctext,)) - unblinded_plaintext = rsaObj.unblind(blinded_ptext, blinding_factor) - self.assertEqual(b2a_hex(plaintext), b2a_hex(unblinded_plaintext)) - - def _check_verification(self, rsaObj): - signature = bytes_to_long(a2b_hex(self.plaintext)) - message = a2b_hex(self.ciphertext) - - # Test verification - t = (signature,) # rsaObj.verify expects a tuple - self.assertEqual(1, rsaObj.verify(message, t)) - - # Test verification with overlong tuple (this is a - # backward-compatibility hack to support some harmless misuse of the - # API) - t2 = (signature, '') - self.assertEqual(1, rsaObj.verify(message, t2)) # extra garbage at end of tuple - - def _check_signing(self, rsaObj): - signature = bytes_to_long(a2b_hex(self.plaintext)) - message = a2b_hex(self.ciphertext) - - # Test signing (2 argument) - self.assertEqual((signature,), rsaObj.sign(message, b(""))) - -class RSAFastMathTest(RSATest): - def setUp(self): - RSATest.setUp(self) - self.rsa = RSA.RSAImplementation(use_fast_math=True) - - def test_generate_1arg(self): - """RSA (_fastmath implementation) generated key (1 argument)""" - RSATest.test_generate_1arg(self) - - def test_generate_2arg(self): - """RSA (_fastmath implementation) generated key (2 arguments)""" - RSATest.test_generate_2arg(self) - - def test_construct_2tuple(self): - """RSA (_fastmath implementation) constructed key (2-tuple)""" - RSATest.test_construct_2tuple(self) - - def test_construct_3tuple(self): - """RSA (_fastmath implementation) constructed key (3-tuple)""" - RSATest.test_construct_3tuple(self) - - def 
test_construct_4tuple(self): - """RSA (_fastmath implementation) constructed key (4-tuple)""" - RSATest.test_construct_4tuple(self) - - def test_construct_5tuple(self): - """RSA (_fastmath implementation) constructed key (5-tuple)""" - RSATest.test_construct_5tuple(self) - - def test_construct_6tuple(self): - """RSA (_fastmath implementation) constructed key (6-tuple)""" - RSATest.test_construct_6tuple(self) - - def test_factoring(self): - RSATest.test_factoring(self) - -class RSASlowMathTest(RSATest): - def setUp(self): - RSATest.setUp(self) - self.rsa = RSA.RSAImplementation(use_fast_math=False) - - def test_generate_1arg(self): - """RSA (_slowmath implementation) generated key (1 argument)""" - RSATest.test_generate_1arg(self) - - def test_generate_2arg(self): - """RSA (_slowmath implementation) generated key (2 arguments)""" - RSATest.test_generate_2arg(self) - - def test_construct_2tuple(self): - """RSA (_slowmath implementation) constructed key (2-tuple)""" - RSATest.test_construct_2tuple(self) - - def test_construct_3tuple(self): - """RSA (_slowmath implementation) constructed key (3-tuple)""" - RSATest.test_construct_3tuple(self) - - def test_construct_4tuple(self): - """RSA (_slowmath implementation) constructed key (4-tuple)""" - RSATest.test_construct_4tuple(self) - - def test_construct_5tuple(self): - """RSA (_slowmath implementation) constructed key (5-tuple)""" - RSATest.test_construct_5tuple(self) - - def test_construct_6tuple(self): - """RSA (_slowmath implementation) constructed key (6-tuple)""" - RSATest.test_construct_6tuple(self) - - def test_factoring(self): - RSATest.test_factoring(self) - -def get_tests(config={}): - tests = [] - tests += list_test_cases(RSATest) - try: - from Crypto.PublicKey import _fastmath - tests += list_test_cases(RSAFastMathTest) - except ImportError: - from distutils.sysconfig import get_config_var - import inspect - _fm_path = os.path.normpath(os.path.dirname(os.path.abspath( - inspect.getfile(inspect.currentframe()))) - +"/../../PublicKey/_fastmath"+get_config_var("SO")) - if os.path.exists(_fm_path): - raise ImportError("While the _fastmath module exists, importing "+ - "it failed. This may point to the gmp or mpir shared library "+ - "not being in the path. _fastmath was found at "+_fm_path) - if config.get('slow_tests',1): - tests += list_test_cases(RSASlowMathTest) - return tests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/PublicKey/test_importKey.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/PublicKey/test_importKey.py deleted file mode 100644 index 01fbdf8..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/PublicKey/test_importKey.py +++ /dev/null @@ -1,345 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/PublicKey/test_importKey.py: Self-test for importing RSA keys -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - - - -__revision__ = "$Id$" - -import unittest - -from Crypto.PublicKey import RSA -from Crypto.SelfTest.st_common import * -from Crypto.Util.py3compat import * -from Crypto.Util.number import inverse -from Crypto.Util import asn1 - -def der2pem(der, text='PUBLIC'): - import binascii - chunks = [ binascii.b2a_base64(der[i:i+48]) for i in range(0, len(der), 48) ] - pem = b('-----BEGIN %s KEY-----\n' % text) - pem += b('').join(chunks) - pem += b('-----END %s KEY-----' % text) - return pem - -class ImportKeyTests(unittest.TestCase): - # 512-bit RSA key generated with openssl - rsaKeyPEM = '''-----BEGIN RSA PRIVATE KEY----- -MIIBOwIBAAJBAL8eJ5AKoIsjURpcEoGubZMxLD7+kT+TLr7UkvEtFrRhDDKMtuII -q19FrL4pUIMymPMSLBn3hJLe30Dw48GQM4UCAwEAAQJACUSDEp8RTe32ftq8IwG8 -Wojl5mAd1wFiIOrZ/Uv8b963WJOJiuQcVN29vxU5+My9GPZ7RA3hrDBEAoHUDPrI -OQIhAPIPLz4dphiD9imAkivY31Rc5AfHJiQRA7XixTcjEkojAiEAyh/pJHks/Mlr -+rdPNEpotBjfV4M4BkgGAA/ipcmaAjcCIQCHvhwwKVBLzzTscT2HeUdEeBMoiXXK -JACAr3sJQJGxIQIgarRp+m1WSKV1MciwMaTOnbU7wxFs9DP1pva76lYBzgUCIQC9 -n0CnZCJ6IZYqSt0H5N7+Q+2Ro64nuwV/OSQfM6sBwQ== ------END RSA PRIVATE KEY-----''' - - # As above, but this is actually an unencrypted PKCS#8 key - rsaKeyPEM8 = '''-----BEGIN PRIVATE KEY----- -MIIBVQIBADANBgkqhkiG9w0BAQEFAASCAT8wggE7AgEAAkEAvx4nkAqgiyNRGlwS -ga5tkzEsPv6RP5MuvtSS8S0WtGEMMoy24girX0WsvilQgzKY8xIsGfeEkt7fQPDj -wZAzhQIDAQABAkAJRIMSnxFN7fZ+2rwjAbxaiOXmYB3XAWIg6tn9S/xv3rdYk4mK -5BxU3b2/FTn4zL0Y9ntEDeGsMEQCgdQM+sg5AiEA8g8vPh2mGIP2KYCSK9jfVFzk -B8cmJBEDteLFNyMSSiMCIQDKH+kkeSz8yWv6t080Smi0GN9XgzgGSAYAD+KlyZoC -NwIhAIe+HDApUEvPNOxxPYd5R0R4EyiJdcokAICvewlAkbEhAiBqtGn6bVZIpXUx -yLAxpM6dtTvDEWz0M/Wm9rvqVgHOBQIhAL2fQKdkInohlipK3Qfk3v5D7ZGjrie7 -BX85JB8zqwHB ------END PRIVATE KEY-----''' - - # The same RSA private key as in rsaKeyPEM, but now encrypted - rsaKeyEncryptedPEM=( - - # With DES and passphrase 'test' - ('test', '''-----BEGIN RSA PRIVATE KEY----- -Proc-Type: 4,ENCRYPTED -DEK-Info: DES-CBC,AF8F9A40BD2FA2FC - -Ckl9ex1kaVEWhYC2QBmfaF+YPiR4NFkRXA7nj3dcnuFEzBnY5XULupqQpQI3qbfA -u8GYS7+b3toWWiHZivHbAAUBPDIZG9hKDyB9Sq2VMARGsX1yW1zhNvZLIiVJzUHs -C6NxQ1IJWOXzTew/xM2I26kPwHIvadq+/VaT8gLQdjdH0jOiVNaevjWnLgrn1mLP -BCNRMdcexozWtAFNNqSzfW58MJL2OdMi21ED184EFytIc1BlB+FZiGZduwKGuaKy -9bMbdb/1PSvsSzPsqW7KSSrTw6MgJAFJg6lzIYvR5F4poTVBxwBX3+EyEmShiaNY -IRX3TgQI0IjrVuLmvlZKbGWP18FXj7I7k9tSsNOOzllTTdq3ny5vgM3A+ynfAaxp -dysKznQ6P+IoqML1WxAID4aGRMWka+uArOJ148Rbj9s= ------END RSA PRIVATE KEY-----''', - "\xAF\x8F\x9A\x40\xBD\x2F\xA2\xFC"), - - # With Triple-DES and passphrase 'rocking' - ('rocking', '''-----BEGIN RSA PRIVATE KEY----- -Proc-Type: 4,ENCRYPTED -DEK-Info: DES-EDE3-CBC,C05D6C07F7FC02F6 - -w4lwQrXaVoTTJ0GgwY566htTA2/t1YlimhxkxYt9AEeCcidS5M0Wq9ClPiPz9O7F -m6K5QpM1rxo1RUE/ZyI85gglRNPdNwkeTOqit+kum7nN73AToX17+irVmOA4Z9E+ -4O07t91GxGMcjUSIFk0ucwEU4jgxRvYscbvOMvNbuZszGdVNzBTVddnShKCsy9i7 -nJbPlXeEKYi/OkRgO4PtfqqWQu5GIEFVUf9ev1QV7AvC+kyWTR1wWYnHX265jU5c -sopxQQtP8XEHIJEdd5/p1oieRcWTCNyY8EkslxDSsrf0OtZp6mZH9N+KU47cgQtt 
-9qGORmlWnsIoFFKcDohbtOaWBTKhkj5h6OkLjFjfU/sBeV1c+7wDT3dAy5tawXjG -YSxC7qDQIT/RECvV3+oQKEcmpEujn45wAnkTi12BH30= ------END RSA PRIVATE KEY-----''', - "\xC0\x5D\x6C\x07\xF7\xFC\x02\xF6"), - ) - - rsaPublicKeyPEM = '''-----BEGIN PUBLIC KEY----- -MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAL8eJ5AKoIsjURpcEoGubZMxLD7+kT+T -Lr7UkvEtFrRhDDKMtuIIq19FrL4pUIMymPMSLBn3hJLe30Dw48GQM4UCAwEAAQ== ------END PUBLIC KEY-----''' - - # Obtained using 'ssh-keygen -i -m PKCS8 -f rsaPublicKeyPEM' - rsaPublicKeyOpenSSH = '''ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAQQC/HieQCqCLI1EaXBKBrm2TMSw+/pE/ky6+1JLxLRa0YQwyjLbiCKtfRay+KVCDMpjzEiwZ94SS3t9A8OPBkDOF comment\n''' - - # The private key, in PKCS#1 format encoded with DER - rsaKeyDER = a2b_hex( - '''3082013b020100024100bf1e27900aa08b23511a5c1281ae6d93312c3efe - 913f932ebed492f12d16b4610c328cb6e208ab5f45acbe2950833298f312 - 2c19f78492dedf40f0e3c190338502030100010240094483129f114dedf6 - 7edabc2301bc5a88e5e6601dd7016220ead9fd4bfc6fdeb75893898ae41c - 54ddbdbf1539f8ccbd18f67b440de1ac30440281d40cfac839022100f20f - 2f3e1da61883f62980922bd8df545ce407c726241103b5e2c53723124a23 - 022100ca1fe924792cfcc96bfab74f344a68b418df578338064806000fe2 - a5c99a023702210087be1c3029504bcf34ec713d877947447813288975ca - 240080af7b094091b12102206ab469fa6d5648a57531c8b031a4ce9db53b - c3116cf433f5a6f6bbea5601ce05022100bd9f40a764227a21962a4add07 - e4defe43ed91a3ae27bb057f39241f33ab01c1 - '''.replace(" ","")) - - # The private key, in unencrypted PKCS#8 format encoded with DER - rsaKeyDER8 = a2b_hex( - '''30820155020100300d06092a864886f70d01010105000482013f3082013 - b020100024100bf1e27900aa08b23511a5c1281ae6d93312c3efe913f932 - ebed492f12d16b4610c328cb6e208ab5f45acbe2950833298f3122c19f78 - 492dedf40f0e3c190338502030100010240094483129f114dedf67edabc2 - 301bc5a88e5e6601dd7016220ead9fd4bfc6fdeb75893898ae41c54ddbdb - f1539f8ccbd18f67b440de1ac30440281d40cfac839022100f20f2f3e1da - 61883f62980922bd8df545ce407c726241103b5e2c53723124a23022100c - a1fe924792cfcc96bfab74f344a68b418df578338064806000fe2a5c99a0 - 23702210087be1c3029504bcf34ec713d877947447813288975ca240080a - f7b094091b12102206ab469fa6d5648a57531c8b031a4ce9db53bc3116cf - 433f5a6f6bbea5601ce05022100bd9f40a764227a21962a4add07e4defe4 - 3ed91a3ae27bb057f39241f33ab01c1 - '''.replace(" ","")) - - rsaPublicKeyDER = a2b_hex( - '''305c300d06092a864886f70d0101010500034b003048024100bf1e27900a - a08b23511a5c1281ae6d93312c3efe913f932ebed492f12d16b4610c328c - b6e208ab5f45acbe2950833298f3122c19f78492dedf40f0e3c190338502 - 03010001 - '''.replace(" ","")) - - n = int('BF 1E 27 90 0A A0 8B 23 51 1A 5C 12 81 AE 6D 93 31 2C 3E FE 91 3F 93 2E BE D4 92 F1 2D 16 B4 61 0C 32 8C B6 E2 08 AB 5F 45 AC BE 29 50 83 32 98 F3 12 2C 19 F7 84 92 DE DF 40 F0 E3 C1 90 33 85'.replace(" ",""),16) - e = 65537 - d = int('09 44 83 12 9F 11 4D ED F6 7E DA BC 23 01 BC 5A 88 E5 E6 60 1D D7 01 62 20 EA D9 FD 4B FC 6F DE B7 58 93 89 8A E4 1C 54 DD BD BF 15 39 F8 CC BD 18 F6 7B 44 0D E1 AC 30 44 02 81 D4 0C FA C8 39'.replace(" ",""),16) - p = int('00 F2 0F 2F 3E 1D A6 18 83 F6 29 80 92 2B D8 DF 54 5C E4 07 C7 26 24 11 03 B5 E2 C5 37 23 12 4A 23'.replace(" ",""),16) - q = int('00 CA 1F E9 24 79 2C FC C9 6B FA B7 4F 34 4A 68 B4 18 DF 57 83 38 06 48 06 00 0F E2 A5 C9 9A 02 37'.replace(" ",""),16) - - # This is q^{-1} mod p). fastmath and slowmath use pInv (p^{-1} - # mod q) instead! 
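The comment above flags a real pitfall: PKCS#1/OpenSSL private keys carry qInv = q^-1 mod p, while the tuple handed to construct() in this suite expects u = p^-1 mod q. A small self-contained sketch of that distinction, using toy primes instead of the 512-bit test key and a plain extended-Euclid inverse rather than Crypto.Util.number.inverse:

    def _egcd(a, b):
        # extended Euclid: returns (g, x, y) with a*x + b*y == g == gcd(a, b)
        if a == 0:
            return b, 0, 1
        g, x, y = _egcd(b % a, a)
        return g, y - (b // a) * x, x

    def modinv(a, m):
        g, x, _ = _egcd(a % m, m)
        if g != 1:
            raise ValueError("no modular inverse")
        return x % m

    p, q = 61, 53            # toy primes, not the key material above
    qInv = modinv(q, p)      # what PKCS#1 / OpenSSL keys store
    pInv = modinv(p, q)      # what the 6-tuple passed to construct() expects as u
    assert (q * qInv) % p == 1 and (p * pInv) % q == 1
    assert qInv != pInv      # the two CRT coefficients generally differ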
- qInv = int('00 BD 9F 40 A7 64 22 7A 21 96 2A 4A DD 07 E4 DE FE 43 ED 91 A3 AE 27 BB 05 7F 39 24 1F 33 AB 01 C1'.replace(" ",""),16) - pInv = inverse(p,q) - - def testImportKey1(self): - """Verify import of RSAPrivateKey DER SEQUENCE""" - key = self.rsa.importKey(self.rsaKeyDER) - self.assertTrue(key.has_private()) - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - self.assertEqual(key.d, self.d) - self.assertEqual(key.p, self.p) - self.assertEqual(key.q, self.q) - - def testImportKey2(self): - """Verify import of SubjectPublicKeyInfo DER SEQUENCE""" - key = self.rsa.importKey(self.rsaPublicKeyDER) - self.assertFalse(key.has_private()) - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - - def testImportKey3unicode(self): - """Verify import of RSAPrivateKey DER SEQUENCE, encoded with PEM as unicode""" - key = RSA.importKey(self.rsaKeyPEM) - self.assertEqual(key.has_private(),True) # assert_ - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - self.assertEqual(key.d, self.d) - self.assertEqual(key.p, self.p) - self.assertEqual(key.q, self.q) - - def testImportKey3bytes(self): - """Verify import of RSAPrivateKey DER SEQUENCE, encoded with PEM as byte string""" - key = RSA.importKey(b(self.rsaKeyPEM)) - self.assertEqual(key.has_private(),True) # assert_ - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - self.assertEqual(key.d, self.d) - self.assertEqual(key.p, self.p) - self.assertEqual(key.q, self.q) - - def testImportKey4unicode(self): - """Verify import of RSAPrivateKey DER SEQUENCE, encoded with PEM as unicode""" - key = RSA.importKey(self.rsaPublicKeyPEM) - self.assertEqual(key.has_private(),False) # failIf - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - - def testImportKey4bytes(self): - """Verify import of SubjectPublicKeyInfo DER SEQUENCE, encoded with PEM as byte string""" - key = RSA.importKey(b(self.rsaPublicKeyPEM)) - self.assertEqual(key.has_private(),False) # failIf - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - - def testImportKey5(self): - """Verifies that the imported key is still a valid RSA pair""" - key = RSA.importKey(self.rsaKeyPEM) - idem = key.encrypt(key.decrypt(b("Test")),0) - self.assertEqual(idem[0],b("Test")) - - def testImportKey6(self): - """Verifies that the imported key is still a valid RSA pair""" - key = RSA.importKey(self.rsaKeyDER) - idem = key.encrypt(key.decrypt(b("Test")),0) - self.assertEqual(idem[0],b("Test")) - - def testImportKey7(self): - """Verify import of OpenSSH public key""" - key = self.rsa.importKey(self.rsaPublicKeyOpenSSH) - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - - def testImportKey8(self): - """Verify import of encrypted PrivateKeyInfo DER SEQUENCE""" - for t in self.rsaKeyEncryptedPEM: - key = self.rsa.importKey(t[1], t[0]) - self.assertTrue(key.has_private()) - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - self.assertEqual(key.d, self.d) - self.assertEqual(key.p, self.p) - self.assertEqual(key.q, self.q) - - def testImportKey9(self): - """Verify import of unencrypted PrivateKeyInfo DER SEQUENCE""" - key = self.rsa.importKey(self.rsaKeyDER8) - self.assertTrue(key.has_private()) - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - self.assertEqual(key.d, self.d) - self.assertEqual(key.p, self.p) - self.assertEqual(key.q, self.q) - - def testImportKey10(self): - """Verify import of unencrypted PrivateKeyInfo DER SEQUENCE, encoded with PEM""" - key = 
self.rsa.importKey(self.rsaKeyPEM8) - self.assertTrue(key.has_private()) - self.assertEqual(key.n, self.n) - self.assertEqual(key.e, self.e) - self.assertEqual(key.d, self.d) - self.assertEqual(key.p, self.p) - self.assertEqual(key.q, self.q) - - def testImportKey11(self): - """Verify import of RSAPublicKey DER SEQUENCE""" - der = asn1.DerSequence([17, 3]).encode() - key = self.rsa.importKey(der) - self.assertEqual(key.n, 17) - self.assertEqual(key.e, 3) - - def testImportKey12(self): - """Verify import of RSAPublicKey DER SEQUENCE, encoded with PEM""" - der = asn1.DerSequence([17, 3]).encode() - pem = der2pem(der) - key = self.rsa.importKey(pem) - self.assertEqual(key.n, 17) - self.assertEqual(key.e, 3) - - ### - def testExportKey1(self): - key = self.rsa.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) - derKey = key.exportKey("DER") - self.assertEqual(derKey, self.rsaKeyDER) - - def testExportKey2(self): - key = self.rsa.construct([self.n, self.e]) - derKey = key.exportKey("DER") - self.assertEqual(derKey, self.rsaPublicKeyDER) - - def testExportKey3(self): - key = self.rsa.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) - pemKey = key.exportKey("PEM") - self.assertEqual(pemKey, b(self.rsaKeyPEM)) - - def testExportKey4(self): - key = self.rsa.construct([self.n, self.e]) - pemKey = key.exportKey("PEM") - self.assertEqual(pemKey, b(self.rsaPublicKeyPEM)) - - def testExportKey5(self): - key = self.rsa.construct([self.n, self.e]) - openssh_1 = key.exportKey("OpenSSH").split() - openssh_2 = self.rsaPublicKeyOpenSSH.split() - self.assertEqual(openssh_1[0], openssh_2[0]) - self.assertEqual(openssh_1[1], openssh_2[1]) - - def testExportKey4(self): - key = self.rsa.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) - # Tuple with index #1 is encrypted with 3DES - t = list(map(b,self.rsaKeyEncryptedPEM[1])) - # Force the salt being used when exporting - key._randfunc = lambda N: (t[2]*divmod(N+len(t[2]),len(t[2]))[0])[:N] - pemKey = key.exportKey("PEM", t[0]) - self.assertEqual(pemKey, t[1]) - - def testExportKey5(self): - key = self.rsa.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) - derKey = key.exportKey("DER", pkcs=8) - self.assertEqual(derKey, self.rsaKeyDER8) - - def testExportKey6(self): - key = self.rsa.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) - pemKey = key.exportKey("PEM", pkcs=8) - self.assertEqual(pemKey, b(self.rsaKeyPEM8)) - -class ImportKeyTestsSlow(ImportKeyTests): - def setUp(self): - self.rsa = RSA.RSAImplementation(use_fast_math=0) - -class ImportKeyTestsFast(ImportKeyTests): - def setUp(self): - self.rsa = RSA.RSAImplementation(use_fast_math=1) - -if __name__ == '__main__': - unittest.main() - -def get_tests(config={}): - tests = [] - try: - from Crypto.PublicKey import _fastmath - tests += list_test_cases(ImportKeyTestsFast) - except ImportError: - pass - tests += list_test_cases(ImportKeyTestsSlow) - return tests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/Fortuna/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/Fortuna/__init__.py deleted file mode 100644 index 81a0e13..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/Fortuna/__init__.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Random/Fortuna/__init__.py: 
Self-test for Fortuna modules -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test for the Crypto.Random.Fortuna package""" - -__revision__ = "$Id$" - -import os - -def get_tests(config={}): - tests = [] - from Crypto.SelfTest.Random.Fortuna import test_FortunaAccumulator; tests += test_FortunaAccumulator.get_tests(config=config) - from Crypto.SelfTest.Random.Fortuna import test_FortunaGenerator; tests += test_FortunaGenerator.get_tests(config=config) - from Crypto.SelfTest.Random.Fortuna import test_SHAd256; tests += test_SHAd256.get_tests(config=config) - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaAccumulator.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaAccumulator.py deleted file mode 100644 index 4d288a0..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaAccumulator.py +++ /dev/null @@ -1,189 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Random/Fortuna/test_FortunaAccumulator.py: Self-test for the FortunaAccumulator module -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-tests for Crypto.Random.Fortuna.FortunaAccumulator""" - -__revision__ = "$Id$" - -import sys -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * -from Crypto.Util.py3compat import * - -import unittest -from binascii import b2a_hex - -class FortunaAccumulatorTests(unittest.TestCase): - def setUp(self): - global FortunaAccumulator - from Crypto.Random.Fortuna import FortunaAccumulator - - def test_FortunaPool(self): - """FortunaAccumulator.FortunaPool""" - pool = FortunaAccumulator.FortunaPool() - self.assertEqual(0, pool.length) - self.assertEqual("5df6e0e2761359d30a8275058e299fcc0381534545f55cf43e41983f5d4c9456", pool.hexdigest()) - - pool.append(b('abc')) - - self.assertEqual(3, pool.length) - self.assertEqual("4f8b42c22dd3729b519ba6f68d2da7cc5b2d606d05daed5ad5128cc03e6c6358", pool.hexdigest()) - - pool.append(b("dbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq")) - - self.assertEqual(56, pool.length) - self.assertEqual(b('0cffe17f68954dac3a84fb1458bd5ec99209449749b2b308b7cb55812f9563af'), b2a_hex(pool.digest())) - - pool.reset() - - self.assertEqual(0, pool.length) - - pool.append(b('a') * 10**6) - - self.assertEqual(10**6, pool.length) - self.assertEqual(b('80d1189477563e1b5206b2749f1afe4807e5705e8bd77887a60187a712156688'), b2a_hex(pool.digest())) - - def test_which_pools(self): - """FortunaAccumulator.which_pools""" - - # which_pools(0) should fail - self.assertRaises(AssertionError, FortunaAccumulator.which_pools, 0) - - self.assertEqual(FortunaAccumulator.which_pools(1), [0]) - self.assertEqual(FortunaAccumulator.which_pools(2), [0, 1]) - self.assertEqual(FortunaAccumulator.which_pools(3), [0]) - self.assertEqual(FortunaAccumulator.which_pools(4), [0, 1, 2]) - self.assertEqual(FortunaAccumulator.which_pools(5), [0]) - self.assertEqual(FortunaAccumulator.which_pools(6), [0, 1]) - self.assertEqual(FortunaAccumulator.which_pools(7), [0]) - self.assertEqual(FortunaAccumulator.which_pools(8), [0, 1, 2, 3]) - for i in range(1, 32): - self.assertEqual(FortunaAccumulator.which_pools(2**i-1), [0]) - self.assertEqual(FortunaAccumulator.which_pools(2**i), list(range(i+1))) - self.assertEqual(FortunaAccumulator.which_pools(2**i+1), [0]) - self.assertEqual(FortunaAccumulator.which_pools(2**31), list(range(32))) - self.assertEqual(FortunaAccumulator.which_pools(2**32), list(range(32))) - self.assertEqual(FortunaAccumulator.which_pools(2**33), list(range(32))) - self.assertEqual(FortunaAccumulator.which_pools(2**34), list(range(32))) - self.assertEqual(FortunaAccumulator.which_pools(2**35), list(range(32))) - self.assertEqual(FortunaAccumulator.which_pools(2**36), list(range(32))) - self.assertEqual(FortunaAccumulator.which_pools(2**64), list(range(32))) - self.assertEqual(FortunaAccumulator.which_pools(2**128), list(range(32))) - - def test_accumulator(self): - """FortunaAccumulator.FortunaAccumulator""" - fa = FortunaAccumulator.FortunaAccumulator() - - # This should fail, because we haven't seeded the PRNG yet - self.assertRaises(AssertionError, fa.random_data, 1) - - # Spread some test data across the pools (source number 42) - # This would be horribly insecure in a real system. 
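The which_pools assertions above encode Fortuna's pool-selection rule: pool i contributes entropy to reseed number n exactly when 2**i divides n, with pool 0 drained on every reseed and the schedule capped at 32 pools. A toy reimplementation of that rule, for illustration only and not the library's code:

    def which_pools(n, num_pools=32):
        # pool i is drained at reseed n iff 2**i divides n; n must be positive
        assert n > 0
        pools = []
        i = 0
        while i < num_pools and n % (2 ** i) == 0:
            pools.append(i)
            i += 1
        return pools

    assert which_pools(1) == [0]
    assert which_pools(2) == [0, 1]
    assert which_pools(8) == [0, 1, 2, 3]
    assert which_pools(2 ** 40) == list(range(32))   # never more than 32 pools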
- for p in range(32): - fa.add_random_event(42, p, b("X") * 32) - self.assertEqual(32+2, fa.pools[p].length) - - # This should still fail, because we haven't seeded the PRNG with 64 bytes yet - self.assertRaises(AssertionError, fa.random_data, 1) - - # Add more data - for p in range(32): - fa.add_random_event(42, p, b("X") * 32) - self.assertEqual((32+2)*2, fa.pools[p].length) - - # The underlying RandomGenerator should get seeded with Pool 0 - # s = SHAd256(chr(42) + chr(32) + "X"*32 + chr(42) + chr(32) + "X"*32) - # = SHA256(h'edd546f057b389155a31c32e3975e736c1dec030ddebb137014ecbfb32ed8c6f') - # = h'aef42a5dcbddab67e8efa118e1b47fde5d697f89beb971b99e6e8e5e89fbf064' - # The counter and the key before reseeding is: - # C_0 = 0 - # K_0 = "\x00" * 32 - # The counter after reseeding is 1, and the new key after reseeding is - # C_1 = 1 - # K_1 = SHAd256(K_0 || s) - # = SHA256(h'0eae3e401389fab86640327ac919ecfcb067359d95469e18995ca889abc119a6') - # = h'aafe9d0409fbaaafeb0a1f2ef2014a20953349d3c1c6e6e3b962953bea6184dd' - # The first block of random data, therefore, is - # r_1 = AES-256(K_1, 1) - # = AES-256(K_1, h'01000000000000000000000000000000') - # = h'b7b86bd9a27d96d7bb4add1b6b10d157' - # The second block of random data is - # r_2 = AES-256(K_1, 2) - # = AES-256(K_1, h'02000000000000000000000000000000') - # = h'2350b1c61253db2f8da233be726dc15f' - # The third and fourth blocks of random data (which become the new key) are - # r_3 = AES-256(K_1, 3) - # = AES-256(K_1, h'03000000000000000000000000000000') - # = h'f23ad749f33066ff53d307914fbf5b21' - # r_4 = AES-256(K_1, 4) - # = AES-256(K_1, h'04000000000000000000000000000000') - # = h'da9667c7e86ba247655c9490e9d94a7c' - # K_2 = r_3 || r_4 - # = h'f23ad749f33066ff53d307914fbf5b21da9667c7e86ba247655c9490e9d94a7c' - # The final counter value is 5. - self.assertEqual("aef42a5dcbddab67e8efa118e1b47fde5d697f89beb971b99e6e8e5e89fbf064", - fa.pools[0].hexdigest()) - self.assertEqual(None, fa.generator.key) - self.assertEqual(0, fa.generator.counter.next_value()) - - result = fa.random_data(32) - - self.assertEqual(b("b7b86bd9a27d96d7bb4add1b6b10d157" "2350b1c61253db2f8da233be726dc15f"), b2a_hex(result)) - self.assertEqual(b("f23ad749f33066ff53d307914fbf5b21da9667c7e86ba247655c9490e9d94a7c"), b2a_hex(fa.generator.key)) - self.assertEqual(5, fa.generator.counter.next_value()) - - def test_accumulator_pool_length(self): - """FortunaAccumulator.FortunaAccumulator minimum pool length""" - fa = FortunaAccumulator.FortunaAccumulator() - - # This test case is hard-coded to assume that FortunaAccumulator.min_pool_size is 64. 
- self.assertEqual(fa.min_pool_size, 64) - - # The PRNG should not allow us to get random data from it yet - self.assertRaises(AssertionError, fa.random_data, 1) - - # Add 60 bytes, 4 at a time (2 header + 2 payload) to each of the 32 pools - for i in range(15): - for p in range(32): - # Add the bytes to the pool - fa.add_random_event(2, p, b("XX")) - - # The PRNG should not allow us to get random data from it yet - self.assertRaises(AssertionError, fa.random_data, 1) - - # Add 4 more bytes to pool 0 - fa.add_random_event(2, 0, b("XX")) - - # We should now be able to get data from the accumulator - fa.random_data(1) - -def get_tests(config={}): - from Crypto.SelfTest.st_common import list_test_cases - return list_test_cases(FortunaAccumulatorTests) - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaGenerator.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaGenerator.py deleted file mode 100644 index d41bb02..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaGenerator.py +++ /dev/null @@ -1,83 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Random/Fortuna/test_FortunaGenerator.py: Self-test for the FortunaGenerator module -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-tests for Crypto.Random.Fortuna.FortunaGenerator""" - -__revision__ = "$Id$" - -import sys -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * -from Crypto.Util.py3compat import * - -import unittest -from binascii import b2a_hex - -class FortunaGeneratorTests(unittest.TestCase): - def setUp(self): - global FortunaGenerator - from Crypto.Random.Fortuna import FortunaGenerator - - def test_generator(self): - """FortunaGenerator.AESGenerator""" - fg = FortunaGenerator.AESGenerator() - - # We shouldn't be able to read data until we've seeded the generator - self.assertRaises(Exception, fg.pseudo_random_data, 1) - self.assertEqual(0, fg.counter.next_value()) - - # Seed the generator, which should set the key and increment the counter. 
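This generator test pins down Fortuna's generate-then-rekey counter arithmetic: each request encrypts successive counter blocks under the current key, then immediately replaces the key with two further blocks. A toy model of that pattern, assuming the Crypto.Cipher.AES ECB interface and standing in for SHAd256 with a double SHA-256; the initial-key handling is simplified and the exact output vectors in this test are not reproduced:

    import hashlib
    from Crypto.Cipher import AES

    def shad256(data):
        # SHAd256(x) = SHA-256(SHA-256(x)), the hash Fortuna builds on
        return hashlib.sha256(hashlib.sha256(data).digest()).digest()

    class TinyFortunaGenerator:
        def __init__(self):
            self.key = b"\x00" * 32   # simplified starting state
            self.counter = 0

        def reseed(self, seed):
            self.key = shad256(self.key + seed)
            self.counter += 1

        def _block(self):
            # AES-256 of the 128-bit little-endian counter, then bump the counter
            block = AES.new(self.key, AES.MODE_ECB).encrypt(self.counter.to_bytes(16, "little"))
            self.counter += 1
            return block

        def pseudo_random_data(self, nbytes):
            out = b""
            while len(out) < nbytes:
                out += self._block()
            # generate-then-rekey: earlier output cannot be recomputed from the new key
            self.key = self._block() + self._block()
            return out[:nbytes]

    fg = TinyFortunaGenerator()
    fg.reseed(b"Hello")
    data = fg.pseudo_random_data(32)
    assert len(data) == 32 and fg.counter == 5   # 2 output blocks + 2 re-key blocks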
- fg.reseed(b("Hello")) - self.assertEqual(b("0ea6919d4361551364242a4ba890f8f073676e82cf1a52bb880f7e496648b565"), b2a_hex(fg.key)) - self.assertEqual(1, fg.counter.next_value()) - - # Read 2 full blocks from the generator - self.assertEqual(b("7cbe2c17684ac223d08969ee8b565616") + # counter=1 - b("717661c0d2f4758bd6ba140bf3791abd"), # counter=2 - b2a_hex(fg.pseudo_random_data(32))) - - # Meanwhile, the generator will have re-keyed itself and incremented its counter - self.assertEqual(b("33a1bb21987859caf2bbfc5615bef56d") + # counter=3 - b("e6b71ff9f37112d0c193a135160862b7"), # counter=4 - b2a_hex(fg.key)) - self.assertEqual(5, fg.counter.next_value()) - - # Read another 2 blocks from the generator - self.assertEqual(b("fd6648ba3086e919cee34904ef09a7ff") + # counter=5 - b("021f77580558b8c3e9248275f23042bf"), # counter=6 - b2a_hex(fg.pseudo_random_data(32))) - - - # Try to read more than 2**20 bytes using the internal function. This should fail. - self.assertRaises(AssertionError, fg._pseudo_random_data, 2**20+1) - -def get_tests(config={}): - from Crypto.SelfTest.st_common import list_test_cases - return list_test_cases(FortunaGeneratorTests) - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/Fortuna/test_SHAd256.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/Fortuna/test_SHAd256.py deleted file mode 100644 index f94db8a..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/Fortuna/test_SHAd256.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Random/Fortuna/test_SHAd256.py: Self-test for the SHAd256 hash function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Random.Fortuna.SHAd256""" - -__revision__ = "$Id$" -from Crypto.Util.py3compat import * - -# This is a list of (expected_result, input[, description]) tuples. -test_data = [ - # I could not find any test vectors for SHAd256, so I made these vectors by - # feeding some sample data into several plain SHA256 implementations - # (including OpenSSL, the "sha256sum" tool, and this implementation). - # This is a subset of the resulting test vectors. 
The complete list can be - # found at: http://www.dlitz.net/crypto/shad256-test-vectors/ - ('5df6e0e2761359d30a8275058e299fcc0381534545f55cf43e41983f5d4c9456', - '', "'' (empty string)"), - ('4f8b42c22dd3729b519ba6f68d2da7cc5b2d606d05daed5ad5128cc03e6c6358', - 'abc'), - ('0cffe17f68954dac3a84fb1458bd5ec99209449749b2b308b7cb55812f9563af', - 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq') -] - -def get_tests(config={}): - from Crypto.Random.Fortuna import SHAd256 - from Crypto.SelfTest.Hash.common import make_hash_tests - return make_hash_tests(SHAd256, "SHAd256", test_data, 32) - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/OSRNG/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/OSRNG/__init__.py deleted file mode 100644 index 44b3fa1..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/OSRNG/__init__.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Random/OSRNG/__init__.py: Self-test for OSRNG modules -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test for Crypto.Random.OSRNG package""" - -__revision__ = "$Id$" - -import os - -def get_tests(config={}): - tests = [] - if os.name == 'nt': - from Crypto.SelfTest.Random.OSRNG import test_nt; tests += test_nt.get_tests(config=config) - from Crypto.SelfTest.Random.OSRNG import test_winrandom; tests += test_winrandom.get_tests(config=config) - elif os.name == 'posix': - from Crypto.SelfTest.Random.OSRNG import test_posix; tests += test_posix.get_tests(config=config) - if hasattr(os, 'urandom'): - from Crypto.SelfTest.Random.OSRNG import test_fallback; tests += test_fallback.get_tests(config=config) - from Crypto.SelfTest.Random.OSRNG import test_generic; tests += test_generic.get_tests(config=config) - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/OSRNG/test_fallback.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/OSRNG/test_fallback.py deleted file mode 100644 index 41909b0..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/OSRNG/test_fallback.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Util/test_fallback.py: Self-test for the OSRNG.fallback.new() function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test suite for Crypto.Random.OSRNG.fallback""" - -__revision__ = "$Id$" - -import unittest - -class SimpleTest(unittest.TestCase): - def runTest(self): - """Crypto.Random.OSRNG.fallback.new()""" - # Import the OSRNG.nt module and try to use it - import Crypto.Random.OSRNG.fallback - randobj = Crypto.Random.OSRNG.fallback.new() - x = randobj.read(16) - y = randobj.read(16) - self.assertNotEqual(x, y) - -def get_tests(config={}): - return [SimpleTest()] - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/OSRNG/test_generic.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/OSRNG/test_generic.py deleted file mode 100644 index 2a40974..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/OSRNG/test_generic.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Util/test_generic.py: Self-test for the OSRNG.new() function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Random.OSRNG""" - -__revision__ = "$Id$" - -import unittest - -class SimpleTest(unittest.TestCase): - def runTest(self): - """Crypto.Random.OSRNG.new()""" - # Import the OSRNG module and try to use it - import Crypto.Random.OSRNG - randobj = Crypto.Random.OSRNG.new() - x = randobj.read(16) - y = randobj.read(16) - self.assertNotEqual(x, y) - -def get_tests(config={}): - return [SimpleTest()] - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/OSRNG/test_nt.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/OSRNG/test_nt.py deleted file mode 100644 index a7a8338..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/OSRNG/test_nt.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Util/test_generic.py: Self-test for the OSRNG.nt.new() function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. 
To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Random.OSRNG.nt""" - -__revision__ = "$Id$" - -import unittest - -class SimpleTest(unittest.TestCase): - def runTest(self): - """Crypto.Random.OSRNG.nt.new()""" - # Import the OSRNG.nt module and try to use it - import Crypto.Random.OSRNG.nt - randobj = Crypto.Random.OSRNG.nt.new() - x = randobj.read(16) - y = randobj.read(16) - self.assertNotEqual(x, y) - -def get_tests(config={}): - return [SimpleTest()] - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/OSRNG/test_posix.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/OSRNG/test_posix.py deleted file mode 100644 index 2224afe..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/OSRNG/test_posix.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Util/test_posix.py: Self-test for the OSRNG.posix.new() function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test suite for Crypto.Random.OSRNG.posix""" - -__revision__ = "$Id$" - -import unittest - -class SimpleTest(unittest.TestCase): - def runTest(self): - """Crypto.Random.OSRNG.posix.new()""" - # Import the OSRNG.nt module and try to use it - import Crypto.Random.OSRNG.posix - randobj = Crypto.Random.OSRNG.posix.new() - x = randobj.read(16) - y = randobj.read(16) - self.assertNotEqual(x, y) - -def get_tests(config={}): - return [SimpleTest()] - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/OSRNG/test_winrandom.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/OSRNG/test_winrandom.py deleted file mode 100644 index 3010eb7..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/OSRNG/test_winrandom.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Util/test_winrandom.py: Self-test for the winrandom module -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test suite for Crypto.Random.OSRNG.winrandom""" - -__revision__ = "$Id$" - -import unittest - -class SimpleTest(unittest.TestCase): - def runTest(self): - """Crypto.Random.OSRNG.winrandom""" - # Import the winrandom module and try to use it - from Crypto.Random.OSRNG import winrandom - randobj = winrandom.new() - x = randobj.get_bytes(16) - y = randobj.get_bytes(16) - self.assertNotEqual(x, y) - -def get_tests(config={}): - return [SimpleTest()] - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/__init__.py deleted file mode 100644 index f972bf0..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/__init__.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Random/__init__.py: Self-test for random number generation modules -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. 
To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test for random number generators""" - -__revision__ = "$Id$" - -def get_tests(config={}): - tests = [] - from Crypto.SelfTest.Random import Fortuna; tests += Fortuna.get_tests(config=config) - from Crypto.SelfTest.Random import OSRNG; tests += OSRNG.get_tests(config=config) - from Crypto.SelfTest.Random import test_random; tests += test_random.get_tests(config=config) - from Crypto.SelfTest.Random import test_rpoolcompat; tests += test_rpoolcompat.get_tests(config=config) - from Crypto.SelfTest.Random import test__UserFriendlyRNG; tests += test__UserFriendlyRNG.get_tests(config=config) - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/test__UserFriendlyRNG.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/test__UserFriendlyRNG.py deleted file mode 100644 index 1a13345..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/test__UserFriendlyRNG.py +++ /dev/null @@ -1,171 +0,0 @@ -# -*- coding: utf-8 -*- -# Self-tests for the user-friendly Crypto.Random interface -# -# Written in 2013 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test suite for generic Crypto.Random stuff """ - - - -__revision__ = "$Id$" - -import binascii -import pprint -import unittest -import os -import time -import sys -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * -from Crypto.Util.py3compat import * - -try: - import multiprocessing -except ImportError: - multiprocessing = None - -import Crypto.Random._UserFriendlyRNG -import Crypto.Random.random - -class RNGForkTest(unittest.TestCase): - - def _get_reseed_count(self): - """ - Get `FortunaAccumulator.reseed_count`, the global count of the - number of times that the PRNG has been reseeded. - """ - rng_singleton = Crypto.Random._UserFriendlyRNG._get_singleton() - rng_singleton._lock.acquire() - try: - return rng_singleton._fa.reseed_count - finally: - rng_singleton._lock.release() - - def runTest(self): - # Regression test for CVE-2013-1445. We had a bug where, under the - # right conditions, two processes might see the same random sequence. - - if sys.platform.startswith('win'): # windows can't fork - assert not hasattr(os, 'fork') # ... right? - return - - # Wait 150 ms so that we don't trigger the rate-limit prematurely. - time.sleep(0.15) - - reseed_count_before = self._get_reseed_count() - - # One or both of these calls together should trigger a reseed right here. - Crypto.Random._UserFriendlyRNG._get_singleton().reinit() - Crypto.Random.get_random_bytes(1) - - reseed_count_after = self._get_reseed_count() - self.assertNotEqual(reseed_count_before, reseed_count_after) # sanity check: test should reseed parent before forking - - rfiles = [] - for i in range(10): - rfd, wfd = os.pipe() - if os.fork() == 0: - # child - os.close(rfd) - f = os.fdopen(wfd, "wb") - - Crypto.Random.atfork() - - data = Crypto.Random.get_random_bytes(16) - - f.write(data) - f.close() - os._exit(0) - # parent - os.close(wfd) - rfiles.append(os.fdopen(rfd, "rb")) - - results = [] - results_dict = {} - for f in rfiles: - data = binascii.hexlify(f.read()) - results.append(data) - results_dict[data] = 1 - f.close() - - if len(results) != len(list(results_dict.keys())): - raise AssertionError("RNG output duplicated across fork():\n%s" % - (pprint.pformat(results))) - - -# For RNGMultiprocessingForkTest -def _task_main(q): - a = Crypto.Random.get_random_bytes(16) - time.sleep(0.1) # wait 100 ms - b = Crypto.Random.get_random_bytes(16) - q.put(binascii.b2a_hex(a)) - q.put(binascii.b2a_hex(b)) - q.put(None) # Wait for acknowledgment - - -class RNGMultiprocessingForkTest(unittest.TestCase): - - def runTest(self): - # Another regression test for CVE-2013-1445. This is basically the - # same as RNGForkTest, but less compatible with old versions of Python, - # and a little easier to read. - - n_procs = 5 - manager = multiprocessing.Manager() - queues = [manager.Queue(1) for i in range(n_procs)] - - # Reseed the pool - time.sleep(0.15) - Crypto.Random._UserFriendlyRNG._get_singleton().reinit() - Crypto.Random.get_random_bytes(1) - - # Start the child processes - pool = multiprocessing.Pool(processes=n_procs, initializer=Crypto.Random.atfork) - map_result = pool.map_async(_task_main, queues) - - # Get the results, ensuring that no pool processes are reused. 
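For reference, the fork-safety idiom these two regression tests exercise reduces to a minimal POSIX-only sketch (os.fork is unavailable on Windows): the child must call Crypto.Random.atfork() before drawing any bytes so it does not replay the parent's PRNG stream.

    import binascii
    import os
    import Crypto.Random

    def random_hex_from_child():
        # Fork a child that re-seeds its PRNG state and sends back 16 random bytes.
        r, w = os.pipe()
        pid = os.fork()
        if pid == 0:                      # child
            os.close(r)
            Crypto.Random.atfork()        # discard PRNG state inherited from the parent
            os.write(w, binascii.hexlify(Crypto.Random.get_random_bytes(16)))
            os._exit(0)
        os.close(w)                       # parent
        with os.fdopen(r, "rb") as f:
            data = f.read()
        os.waitpid(pid, 0)
        return data

    if __name__ == "__main__":
        assert random_hex_from_child() != random_hex_from_child()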
- aa = [queues[i].get(30) for i in range(n_procs)] - bb = [queues[i].get(30) for i in range(n_procs)] - res = list(zip(aa, bb)) - - # Shut down the pool - map_result.get(30) - pool.close() - pool.join() - - # Check that the results are unique - if len(set(aa)) != len(aa) or len(set(res)) != len(res): - raise AssertionError("RNG output duplicated across fork():\n%s" % - (pprint.pformat(res),)) - - -def get_tests(config={}): - tests = [] - tests += [RNGForkTest()] - if multiprocessing is not None: - tests += [RNGMultiprocessingForkTest()] - return tests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/test_random.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/test_random.py deleted file mode 100644 index 7fed44f..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/test_random.py +++ /dev/null @@ -1,171 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Util/test_generic.py: Self-test for the Crypto.Random.new() function -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test suite for Crypto.Random.new()""" - -__revision__ = "$Id$" - -import unittest -import sys -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * -from Crypto.Util.py3compat import * - -class SimpleTest(unittest.TestCase): - def runTest(self): - """Crypto.Random.new()""" - # Import the Random module and try to use it - from Crypto import Random - randobj = Random.new() - x = randobj.read(16) - y = randobj.read(16) - self.assertNotEqual(x, y) - z = Random.get_random_bytes(16) - self.assertNotEqual(x, z) - self.assertNotEqual(y, z) - # Test the Random.random module, which - # implements a subset of Python's random API - # Not implemented: - # seed(), getstate(), setstate(), jumpahead() - # random(), uniform(), triangular(), betavariate() - # expovariate(), gammavariate(), gauss(), - # longnormvariate(), normalvariate(), - # vonmisesvariate(), paretovariate() - # weibullvariate() - # WichmannHill(), whseed(), SystemRandom() - from Crypto.Random import random - x = random.getrandbits(16*8) - y = random.getrandbits(16*8) - self.assertNotEqual(x, y) - # Test randrange - if x>y: - start = y - stop = x - else: - start = x - stop = y - for step in range(1,10): - x = random.randrange(start,stop,step) - y = random.randrange(start,stop,step) - self.assertNotEqual(x, y) - self.assertEqual(start <= x < stop, True) - self.assertEqual(start <= y < stop, True) - self.assertEqual((x - start) % step, 0) - self.assertEqual((y - start) % step, 0) - for i in range(10): - self.assertEqual(random.randrange(1,2), 1) - self.assertRaises(ValueError, random.randrange, start, start) - self.assertRaises(ValueError, random.randrange, stop, start, step) - self.assertRaises(TypeError, random.randrange, start, stop, step, step) - self.assertRaises(TypeError, random.randrange, start, stop, "1") - self.assertRaises(TypeError, random.randrange, "1", stop, step) - self.assertRaises(TypeError, random.randrange, 1, "2", step) - self.assertRaises(ValueError, random.randrange, start, stop, 0) - # Test randint - x = random.randint(start,stop) - y = random.randint(start,stop) - self.assertNotEqual(x, y) - self.assertEqual(start <= x <= stop, True) - self.assertEqual(start <= y <= stop, True) - for i in range(10): - self.assertEqual(random.randint(1,1), 1) - self.assertRaises(ValueError, random.randint, stop, start) - self.assertRaises(TypeError, random.randint, start, stop, step) - self.assertRaises(TypeError, random.randint, "1", stop) - self.assertRaises(TypeError, random.randint, 1, "2") - # Test choice - seq = list(range(10000)) - x = random.choice(seq) - y = random.choice(seq) - self.assertNotEqual(x, y) - self.assertEqual(x in seq, True) - self.assertEqual(y in seq, True) - for i in range(10): - self.assertEqual(random.choice((1,2,3)) in (1,2,3), True) - self.assertEqual(random.choice([1,2,3]) in [1,2,3], True) - if sys.version_info[0] is 3: - self.assertEqual(random.choice(bytearray(b('123'))) in bytearray(b('123')), True) - self.assertEqual(1, random.choice([1])) - self.assertRaises(IndexError, random.choice, []) - self.assertRaises(TypeError, random.choice, 1) - # Test shuffle. Lacks random parameter to specify function. 
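The surrounding test exercises Crypto.Random.random, which mirrors a subset of the standard-library random API (getrandbits, randrange, randint, choice, shuffle, sample) on top of the Fortuna PRNG instead of the Mersenne Twister. Typical usage, for illustration:

    from Crypto.Random import random as crypto_random

    token = crypto_random.getrandbits(128)               # cryptographically strong bits
    index = crypto_random.randrange(0, 1000, 5)          # half-open range with a step
    die = crypto_random.randint(1, 6)                    # inclusive on both ends
    pick = crypto_random.choice(["red", "green", "blue"])
    deck = list(range(52))
    crypto_random.shuffle(deck)                          # in place; no random= argument
    hand = crypto_random.sample(deck, 5)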
- # Make copies of seq - seq = list(range(500)) - x = list(seq) - y = list(seq) - random.shuffle(x) - random.shuffle(y) - self.assertNotEqual(x, y) - self.assertEqual(len(seq), len(x)) - self.assertEqual(len(seq), len(y)) - for i in range(len(seq)): - self.assertEqual(x[i] in seq, True) - self.assertEqual(y[i] in seq, True) - self.assertEqual(seq[i] in x, True) - self.assertEqual(seq[i] in y, True) - z = [1] - random.shuffle(z) - self.assertEqual(z, [1]) - if sys.version_info[0] == 3: - z = bytearray(b('12')) - random.shuffle(z) - self.assertEqual(b('1') in z, True) - self.assertRaises(TypeError, random.shuffle, b('12')) - self.assertRaises(TypeError, random.shuffle, 1) - self.assertRaises(TypeError, random.shuffle, "1") - self.assertRaises(TypeError, random.shuffle, (1,2)) - # 2to3 wraps a list() around it, alas - but I want to shoot - # myself in the foot here! :D - # if sys.version_info[0] == 3: - # self.assertRaises(TypeError, random.shuffle, range(3)) - # Test sample - x = random.sample(seq, 20) - y = random.sample(seq, 20) - self.assertNotEqual(x, y) - for i in range(20): - self.assertEqual(x[i] in seq, True) - self.assertEqual(y[i] in seq, True) - z = random.sample([1], 1) - self.assertEqual(z, [1]) - z = random.sample((1,2,3), 1) - self.assertEqual(z[0] in (1,2,3), True) - z = random.sample("123", 1) - self.assertEqual(z[0] in "123", True) - z = random.sample(list(range(3)), 1) - self.assertEqual(z[0] in range(3), True) - if sys.version_info[0] == 3: - z = random.sample(b("123"), 1) - self.assertEqual(z[0] in b("123"), True) - z = random.sample(bytearray(b("123")), 1) - self.assertEqual(z[0] in bytearray(b("123")), True) - self.assertRaises(TypeError, random.sample, 1) - -def get_tests(config={}): - return [SimpleTest()] - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/test_rpoolcompat.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/test_rpoolcompat.py deleted file mode 100644 index be538da..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Random/test_rpoolcompat.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Util/test_winrandom.py: Self-test for the winrandom module -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test for the Crypto.Util.randpool.RandomPool wrapper class""" - -__revision__ = "$Id$" - -import sys -import unittest - -class SimpleTest(unittest.TestCase): - def runTest(self): - """Crypto.Util.randpool.RandomPool""" - # Import the winrandom module and try to use it - from Crypto.Util.randpool import RandomPool - sys.stderr.write("SelfTest: You can ignore the RandomPool_DeprecationWarning that follows.\n") - rpool = RandomPool() - x = rpool.get_bytes(16) - y = rpool.get_bytes(16) - self.assertNotEqual(x, y) - self.assertNotEqual(rpool.entropy, 0) - - rpool.randomize() - rpool.stir('foo') - rpool.add_event('foo') - -def get_tests(config={}): - return [SimpleTest()] - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Signature/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Signature/__init__.py deleted file mode 100644 index 862763a..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Signature/__init__.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Signature/__init__.py: Self-test for signature modules -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-test for signature modules""" - -__revision__ = "$Id$" - -import os - -def get_tests(config={}): - tests = [] - from . import test_pkcs1_15; tests += test_pkcs1_15.get_tests(config=config) - from . import test_pkcs1_pss; tests += test_pkcs1_pss.get_tests(config=config) - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Signature/test_pkcs1_15.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Signature/test_pkcs1_15.py deleted file mode 100644 index cf09e81..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Signature/test_pkcs1_15.py +++ /dev/null @@ -1,219 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Signature/test_pkcs1_15.py: Self-test for PKCS#1 v1.5 signatures -# -# =================================================================== -# The contents of this file are dedicated to the public domain. 
To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -__revision__ = "$Id$" - -import unittest - -from Crypto.PublicKey import RSA -from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex -from Crypto.Hash import * -from Crypto import Random -from Crypto.Signature import PKCS1_v1_5 as PKCS -from Crypto.Util.py3compat import * - -def isStr(s): - t = '' - try: - t += s - except TypeError: - return 0 - return 1 - -def rws(t): - """Remove white spaces, tabs, and new lines from a string""" - for c in ['\n', '\t', ' ']: - t = t.replace(c,'') - return t - -def t2b(t): - """Convert a text string with bytes in hex form to a byte string""" - clean = b(rws(t)) - if len(clean)%2 == 1: - raise ValueError("Even number of characters expected") - return a2b_hex(clean) - -class PKCS1_15_Tests(unittest.TestCase): - - # List of tuples with test data for PKCS#1 v1.5. - # Each tuple is made up by: - # Item #0: dictionary with RSA key component, or key to import - # Item #1: data to hash and sign - # Item #2: signature of the data #1, done with the key #0, after - # hashing it with #3 - # Item #3: hash object generator - - _testData = ( - - # - # Taken from ftp://ftp.rsa.com/pub/pkcs/ascii/examples.asc - # "Some Examples of the PKCS Standards", 1999 - # - ( - - # Private key, from 2.1 - { - 'n':'''0a 66 79 1d c6 98 81 68 de 7a b7 74 19 bb 7f b0 c0 01 c6 - 27 10 27 00 75 14 29 42 e1 9a 8d 8c 51 d0 53 b3 e3 78 2a 1d - e5 dc 5a f4 eb e9 94 68 17 01 14 a1 df e6 7c dc 9a 9a f5 5d - 65 56 20 bb ab''', - 'e':'''01 00 - 01''', - 'd':'''01 23 c5 b6 1b a3 6e db 1d 36 79 90 41 99 a8 9e a8 0c 09 - b9 12 2e 14 00 c0 9a dc f7 78 46 76 d0 1d 23 35 6a 7d 44 d6 - bd 8b d5 0e 94 bf c7 23 fa 87 d8 86 2b 75 17 76 91 c1 1d 75 - 76 92 df 88 81''' - }, - # Data to sign, from 3.1 - '''30 81 a4 02 01 00 30 42 31 0b 30 09 06 - 03 55 04 06 13 02 55 53 31 1d 30 1b 06 03 55 04 0a 13 14 - 45 78 61 6d 70 6c 65 20 4f 72 67 61 6e 69 7a 61 74 69 6f - 6e 31 14 30 12 06 03 55 04 03 13 0b 54 65 73 74 20 55 73 - 65 72 20 31 30 5b 30 0d 06 09 2a 86 48 86 f7 0d 01 01 01 - 05 00 03 4a 00 30 47 02 40 - 0a 66 79 1d c6 98 81 68 de 7a b7 74 19 bb 7f b0 - c0 01 c6 27 10 27 00 75 14 29 42 e1 9a 8d 8c 51 - d0 53 b3 e3 78 2a 1d e5 dc 5a f4 eb e9 94 68 17 - 01 14 a1 df e6 7c dc 9a 9a f5 5d 65 56 20 bb ab - 02 03 01 00 01''', - # Signature, from 3.2 (at the very end) - '''06 db 36 cb 18 d3 47 5b 9c 01 db 3c 78 95 28 08 - 02 79 bb ae ff 2b 7d 55 8e d6 61 59 87 c8 51 86 - 3f 8a 6c 2c ff bc 89 c3 f7 5a 18 d9 6b 12 7c 71 - 7d 54 d0 d8 04 8d a8 a0 54 46 26 d1 7a 2a 8f be''', - MD2 - ), - - # - # RSA keypair generated with openssl - # - ( - """-----BEGIN RSA PRIVATE KEY----- - MIIBOwIBAAJBAL8eJ5AKoIsjURpcEoGubZMxLD7+kT+TLr7UkvEtFrRhDDKMtuII - 
q19FrL4pUIMymPMSLBn3hJLe30Dw48GQM4UCAwEAAQJACUSDEp8RTe32ftq8IwG8 - Wojl5mAd1wFiIOrZ/Uv8b963WJOJiuQcVN29vxU5+My9GPZ7RA3hrDBEAoHUDPrI - OQIhAPIPLz4dphiD9imAkivY31Rc5AfHJiQRA7XixTcjEkojAiEAyh/pJHks/Mlr - +rdPNEpotBjfV4M4BkgGAA/ipcmaAjcCIQCHvhwwKVBLzzTscT2HeUdEeBMoiXXK - JACAr3sJQJGxIQIgarRp+m1WSKV1MciwMaTOnbU7wxFs9DP1pva76lYBzgUCIQC9 - n0CnZCJ6IZYqSt0H5N7+Q+2Ro64nuwV/OSQfM6sBwQ== - -----END RSA PRIVATE KEY-----""", - "This is a test\x0a", - # - # PKCS#1 signature computed with openssl - # - '''4a700a16432a291a3194646952687d5316458b8b86fb0a25aa30e0dcecdb - 442676759ac63d56ec1499c3ae4c0013c2053cabd5b5804848994541ac16 - fa243a4d''', - SHA - ), - - # - # Test vector from http://www.di-mgt.com.au/rsa_alg.html#signpkcs1 - # - ( - { - 'n':'''E08973398DD8F5F5E88776397F4EB005BB5383DE0FB7ABDC7DC775290D052E6D - 12DFA68626D4D26FAA5829FC97ECFA82510F3080BEB1509E4644F12CBBD832CF - C6686F07D9B060ACBEEE34096A13F5F7050593DF5EBA3556D961FF197FC981E6 - F86CEA874070EFAC6D2C749F2DFA553AB9997702A648528C4EF357385774575F''', - 'e':'''010001''', - 'd':'''00A403C327477634346CA686B57949014B2E8AD2C862B2C7D748096A8B91F736 - F275D6E8CD15906027314735644D95CD6763CEB49F56AC2F376E1CEE0EBF282D - F439906F34D86E085BD5656AD841F313D72D395EFE33CBFF29E4030B3D05A28F - B7F18EA27637B07957D32F2BDE8706227D04665EC91BAF8B1AC3EC9144AB7F21''' - }, - "abc", - '''60AD5A78FB4A4030EC542C8974CD15F55384E836554CEDD9A322D5F4135C6267 - A9D20970C54E6651070B0144D43844C899320DD8FA7819F7EBC6A7715287332E - C8675C136183B3F8A1F81EF969418267130A756FDBB2C71D9A667446E34E0EAD - 9CF31BFB66F816F319D0B7E430A5F2891553986E003720261C7E9022C0D9F11F''', - SHA - ) - - ) - - def testSign1(self): - for i in range(len(self._testData)): - row = self._testData[i] - # Build the key - if isStr(row[0]): - key = RSA.importKey(row[0]) - else: - comps = [ int(rws(row[0][x]),16) for x in ('n','e','d') ] - key = RSA.construct(comps) - h = row[3].new() - # Data to sign can either be in hex form or not - try: - h.update(t2b(row[1])) - except: - h.update(b(row[1])) - # The real test - signer = PKCS.new(key) - self.assertTrue(signer.can_sign()) - s = signer.sign(h) - self.assertEqual(s, t2b(row[2])) - - def testVerify1(self): - for i in range(len(self._testData)): - row = self._testData[i] - # Build the key - if isStr(row[0]): - key = RSA.importKey(row[0]).publickey() - else: - comps = [ int(rws(row[0][x]),16) for x in ('n','e') ] - key = RSA.construct(comps) - h = row[3].new() - # Data to sign can either be in hex form or not - try: - h.update(t2b(row[1])) - except: - h.update(b(row[1])) - # The real test - verifier = PKCS.new(key) - self.assertFalse(verifier.can_sign()) - result = verifier.verify(h, t2b(row[2])) - self.assertTrue(result) - - def testSignVerify(self): - rng = Random.new().read - key = RSA.generate(1024, rng) - - for hashmod in (MD2,MD5,SHA,SHA224,SHA256,SHA384,SHA512,RIPEMD): - h = hashmod.new() - h.update(b('blah blah blah')) - - signer = PKCS.new(key) - s = signer.sign(h) - result = signer.verify(h, s) - self.assertTrue(result) - - -def get_tests(config={}): - tests = [] - tests += list_test_cases(PKCS1_15_Tests) - return tests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Signature/test_pkcs1_pss.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Signature/test_pkcs1_pss.py deleted file mode 100644 index 3636ef1..0000000 --- 
a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Signature/test_pkcs1_pss.py +++ /dev/null @@ -1,446 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Signature/test_pkcs1_pss.py: Self-test for PKCS#1 PSS signatures -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - - - -__revision__ = "$Id$" - -import unittest - -from Crypto.PublicKey import RSA -from Crypto import Random -from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex -from Crypto.Hash import * -from Crypto.Signature import PKCS1_PSS as PKCS -from Crypto.Util.py3compat import * - -def isStr(s): - t = '' - try: - t += s - except TypeError: - return 0 - return 1 - -def rws(t): - """Remove white spaces, tabs, and new lines from a string""" - for c in ['\t', '\n', ' ']: - t = t.replace(c,'') - return t - -def t2b(t): - """Convert a text string with bytes in hex form to a byte string""" - clean = b(rws(t)) - if len(clean)%2 == 1: - raise ValueError("Even number of characters expected") - return a2b_hex(clean) - -# Helper class to count how many bytes have been requested -# from the key's private RNG, w/o counting those used for blinding -class MyKey: - def __init__(self, key): - self._key = key - self.n = key.n - self.asked = 0 - def _randfunc(self, N): - self.asked += N - return self._key._randfunc(N) - def sign(self, m): - return self._key.sign(m) - def has_private(self): - return self._key.has_private() - def decrypt(self, m): - return self._key.decrypt(m) - def verify(self, m, p): - return self._key.verify(m, p) - def encrypt(self, m, p): - return self._key.encrypt(m, p) - -class PKCS1_PSS_Tests(unittest.TestCase): - - # List of tuples with test data for PKCS#1 PSS - # Each tuple is made up by: - # Item #0: dictionary with RSA key component, or key to import - # Item #1: data to hash and sign - # Item #2: signature of the data #1, done with the key #0, - # and salt #3 after hashing it with #4 - # Item #3: salt - # Item #4: hash object generator - - _testData = ( - - # - # From in pss-vect.txt to be found in - # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip - # - ( - # Private key - { - 'n':'''a2 ba 40 ee 07 e3 b2 bd 2f 02 ce 22 7f 36 a1 95 - 02 44 86 e4 9c 19 cb 41 bb bd fb ba 98 b2 2b 0e - 57 7c 2e ea ff a2 0d 88 3a 76 e6 5e 39 4c 69 d4 - b3 c0 5a 1e 8f ad da 27 ed b2 a4 2b c0 00 fe 88 - 8b 9b 32 c2 2d 15 ad d0 cd 76 b3 e7 93 6e 19 95 - 5b 22 0d d1 7d 4e a9 04 b1 ec 10 2b 2e 4d e7 75 - 12 22 aa 99 15 10 24 c7 cb 41 cc 5e a2 1d 00 ee - b4 1f 7c 80 08 34 d2 c6 e0 6b ce 3b ce 7e a9 a5''', - 'e':'''01 00 01''', - # In the test vector, only p and q were given... 
- # d is computed offline as e^{-1} mod (p-1)(q-1) - 'd':'''50e2c3e38d886110288dfc68a9533e7e12e27d2aa56 - d2cdb3fb6efa990bcff29e1d2987fb711962860e7391b1ce01 - ebadb9e812d2fbdfaf25df4ae26110a6d7a26f0b810f54875e - 17dd5c9fb6d641761245b81e79f8c88f0e55a6dcd5f133abd3 - 5f8f4ec80adf1bf86277a582894cb6ebcd2162f1c7534f1f49 - 47b129151b71''' - }, - - # Data to sign - '''85 9e ef 2f d7 8a ca 00 30 8b dc 47 11 93 bf 55 - bf 9d 78 db 8f 8a 67 2b 48 46 34 f3 c9 c2 6e 64 - 78 ae 10 26 0f e0 dd 8c 08 2e 53 a5 29 3a f2 17 - 3c d5 0c 6d 5d 35 4f eb f7 8b 26 02 1c 25 c0 27 - 12 e7 8c d4 69 4c 9f 46 97 77 e4 51 e7 f8 e9 e0 - 4c d3 73 9c 6b bf ed ae 48 7f b5 56 44 e9 ca 74 - ff 77 a5 3c b7 29 80 2f 6e d4 a5 ff a8 ba 15 98 - 90 fc''', - # Signature - '''8d aa 62 7d 3d e7 59 5d 63 05 6c 7e c6 59 e5 44 - 06 f1 06 10 12 8b aa e8 21 c8 b2 a0 f3 93 6d 54 - dc 3b dc e4 66 89 f6 b7 95 1b b1 8e 84 05 42 76 - 97 18 d5 71 5d 21 0d 85 ef bb 59 61 92 03 2c 42 - be 4c 29 97 2c 85 62 75 eb 6d 5a 45 f0 5f 51 87 - 6f c6 74 3d ed dd 28 ca ec 9b b3 0e a9 9e 02 c3 - 48 82 69 60 4f e4 97 f7 4c cd 7c 7f ca 16 71 89 - 71 23 cb d3 0d ef 5d 54 a2 b5 53 6a d9 0a 74 7e''', - # Salt - '''e3 b5 d5 d0 02 c1 bc e5 0c 2b 65 ef 88 a1 88 d8 - 3b ce 7e 61''', - # Hash algorithm - SHA - ), - - # - # Example 1.1 to be found in - # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip - # - ( - # Private key - { - 'n':'''a5 6e 4a 0e 70 10 17 58 9a 51 87 dc 7e a8 41 d1 - 56 f2 ec 0e 36 ad 52 a4 4d fe b1 e6 1f 7a d9 91 - d8 c5 10 56 ff ed b1 62 b4 c0 f2 83 a1 2a 88 a3 - 94 df f5 26 ab 72 91 cb b3 07 ce ab fc e0 b1 df - d5 cd 95 08 09 6d 5b 2b 8b 6d f5 d6 71 ef 63 77 - c0 92 1c b2 3c 27 0a 70 e2 59 8e 6f f8 9d 19 f1 - 05 ac c2 d3 f0 cb 35 f2 92 80 e1 38 6b 6f 64 c4 - ef 22 e1 e1 f2 0d 0c e8 cf fb 22 49 bd 9a 21 37''', - 'e':'''01 00 01''', - 'd':'''33 a5 04 2a 90 b2 7d 4f 54 51 ca 9b bb d0 b4 47 - 71 a1 01 af 88 43 40 ae f9 88 5f 2a 4b be 92 e8 - 94 a7 24 ac 3c 56 8c 8f 97 85 3a d0 7c 02 66 c8 - c6 a3 ca 09 29 f1 e8 f1 12 31 88 44 29 fc 4d 9a - e5 5f ee 89 6a 10 ce 70 7c 3e d7 e7 34 e4 47 27 - a3 95 74 50 1a 53 26 83 10 9c 2a ba ca ba 28 3c - 31 b4 bd 2f 53 c3 ee 37 e3 52 ce e3 4f 9e 50 3b - d8 0c 06 22 ad 79 c6 dc ee 88 35 47 c6 a3 b3 25''' - }, - # Message - '''cd c8 7d a2 23 d7 86 df 3b 45 e0 bb bc 72 13 26 - d1 ee 2a f8 06 cc 31 54 75 cc 6f 0d 9c 66 e1 b6 - 23 71 d4 5c e2 39 2e 1a c9 28 44 c3 10 10 2f 15 - 6a 0d 8d 52 c1 f4 c4 0b a3 aa 65 09 57 86 cb 76 - 97 57 a6 56 3b a9 58 fe d0 bc c9 84 e8 b5 17 a3 - d5 f5 15 b2 3b 8a 41 e7 4a a8 67 69 3f 90 df b0 - 61 a6 e8 6d fa ae e6 44 72 c0 0e 5f 20 94 57 29 - cb eb e7 7f 06 ce 78 e0 8f 40 98 fb a4 1f 9d 61 - 93 c0 31 7e 8b 60 d4 b6 08 4a cb 42 d2 9e 38 08 - a3 bc 37 2d 85 e3 31 17 0f cb f7 cc 72 d0 b7 1c - 29 66 48 b3 a4 d1 0f 41 62 95 d0 80 7a a6 25 ca - b2 74 4f d9 ea 8f d2 23 c4 25 37 02 98 28 bd 16 - be 02 54 6f 13 0f d2 e3 3b 93 6d 26 76 e0 8a ed - 1b 73 31 8b 75 0a 01 67 d0''', - # Signature - '''90 74 30 8f b5 98 e9 70 1b 22 94 38 8e 52 f9 71 - fa ac 2b 60 a5 14 5a f1 85 df 52 87 b5 ed 28 87 - e5 7c e7 fd 44 dc 86 34 e4 07 c8 e0 e4 36 0b c2 - 26 f3 ec 22 7f 9d 9e 54 63 8e 8d 31 f5 05 12 15 - df 6e bb 9c 2f 95 79 aa 77 59 8a 38 f9 14 b5 b9 - c1 bd 83 c4 e2 f9 f3 82 a0 d0 aa 35 42 ff ee 65 - 98 4a 60 1b c6 9e b2 8d eb 27 dc a1 2c 82 c2 d4 - c3 f6 6c d5 00 f1 ff 2b 99 4d 8a 4e 30 cb b3 3c''', - # Salt - '''de e9 59 c7 e0 64 11 36 14 20 ff 80 18 5e d5 7f - 3e 67 76 af''', - # Hash - SHA - ), - - # - # Example 1.2 to be found in - # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip - 
# - ( - # Private key - { - 'n':'''a5 6e 4a 0e 70 10 17 58 9a 51 87 dc 7e a8 41 d1 - 56 f2 ec 0e 36 ad 52 a4 4d fe b1 e6 1f 7a d9 91 - d8 c5 10 56 ff ed b1 62 b4 c0 f2 83 a1 2a 88 a3 - 94 df f5 26 ab 72 91 cb b3 07 ce ab fc e0 b1 df - d5 cd 95 08 09 6d 5b 2b 8b 6d f5 d6 71 ef 63 77 - c0 92 1c b2 3c 27 0a 70 e2 59 8e 6f f8 9d 19 f1 - 05 ac c2 d3 f0 cb 35 f2 92 80 e1 38 6b 6f 64 c4 - ef 22 e1 e1 f2 0d 0c e8 cf fb 22 49 bd 9a 21 37''', - 'e':'''01 00 01''', - 'd':'''33 a5 04 2a 90 b2 7d 4f 54 51 ca 9b bb d0 b4 47 - 71 a1 01 af 88 43 40 ae f9 88 5f 2a 4b be 92 e8 - 94 a7 24 ac 3c 56 8c 8f 97 85 3a d0 7c 02 66 c8 - c6 a3 ca 09 29 f1 e8 f1 12 31 88 44 29 fc 4d 9a - e5 5f ee 89 6a 10 ce 70 7c 3e d7 e7 34 e4 47 27 - a3 95 74 50 1a 53 26 83 10 9c 2a ba ca ba 28 3c - 31 b4 bd 2f 53 c3 ee 37 e3 52 ce e3 4f 9e 50 3b - d8 0c 06 22 ad 79 c6 dc ee 88 35 47 c6 a3 b3 25''' - }, - # Message - '''85 13 84 cd fe 81 9c 22 ed 6c 4c cb 30 da eb 5c - f0 59 bc 8e 11 66 b7 e3 53 0c 4c 23 3e 2b 5f 8f - 71 a1 cc a5 82 d4 3e cc 72 b1 bc a1 6d fc 70 13 - 22 6b 9e''', - # Signature - '''3e f7 f4 6e 83 1b f9 2b 32 27 41 42 a5 85 ff ce - fb dc a7 b3 2a e9 0d 10 fb 0f 0c 72 99 84 f0 4e - f2 9a 9d f0 78 07 75 ce 43 73 9b 97 83 83 90 db - 0a 55 05 e6 3d e9 27 02 8d 9d 29 b2 19 ca 2c 45 - 17 83 25 58 a5 5d 69 4a 6d 25 b9 da b6 60 03 c4 - cc cd 90 78 02 19 3b e5 17 0d 26 14 7d 37 b9 35 - 90 24 1b e5 1c 25 05 5f 47 ef 62 75 2c fb e2 14 - 18 fa fe 98 c2 2c 4d 4d 47 72 4f db 56 69 e8 43''', - # Salt - '''ef 28 69 fa 40 c3 46 cb 18 3d ab 3d 7b ff c9 8f - d5 6d f4 2d''', - # Hash - SHA - ), - - # - # Example 2.1 to be found in - # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip - # - ( - # Private key - { - 'n':'''01 d4 0c 1b cf 97 a6 8a e7 cd bd 8a 7b f3 e3 4f - a1 9d cc a4 ef 75 a4 74 54 37 5f 94 51 4d 88 fe - d0 06 fb 82 9f 84 19 ff 87 d6 31 5d a6 8a 1f f3 - a0 93 8e 9a bb 34 64 01 1c 30 3a d9 91 99 cf 0c - 7c 7a 8b 47 7d ce 82 9e 88 44 f6 25 b1 15 e5 e9 - c4 a5 9c f8 f8 11 3b 68 34 33 6a 2f d2 68 9b 47 - 2c bb 5e 5c ab e6 74 35 0c 59 b6 c1 7e 17 68 74 - fb 42 f8 fc 3d 17 6a 01 7e dc 61 fd 32 6c 4b 33 - c9''', - 'e':'''01 00 01''', - 'd':'''02 7d 14 7e 46 73 05 73 77 fd 1e a2 01 56 57 72 - 17 6a 7d c3 83 58 d3 76 04 56 85 a2 e7 87 c2 3c - 15 57 6b c1 6b 9f 44 44 02 d6 bf c5 d9 8a 3e 88 - ea 13 ef 67 c3 53 ec a0 c0 dd ba 92 55 bd 7b 8b - b5 0a 64 4a fd fd 1d d5 16 95 b2 52 d2 2e 73 18 - d1 b6 68 7a 1c 10 ff 75 54 5f 3d b0 fe 60 2d 5f - 2b 7f 29 4e 36 01 ea b7 b9 d1 ce cd 76 7f 64 69 - 2e 3e 53 6c a2 84 6c b0 c2 dd 48 6a 39 fa 75 b1''' - }, - # Message - '''da ba 03 20 66 26 3f ae db 65 98 48 11 52 78 a5 - 2c 44 fa a3 a7 6f 37 51 5e d3 36 32 10 72 c4 0a - 9d 9b 53 bc 05 01 40 78 ad f5 20 87 51 46 aa e7 - 0f f0 60 22 6d cb 7b 1f 1f c2 7e 93 60''', - # Signature - '''01 4c 5b a5 33 83 28 cc c6 e7 a9 0b f1 c0 ab 3f - d6 06 ff 47 96 d3 c1 2e 4b 63 9e d9 13 6a 5f ec - 6c 16 d8 88 4b dd 99 cf dc 52 14 56 b0 74 2b 73 - 68 68 cf 90 de 09 9a db 8d 5f fd 1d ef f3 9b a4 - 00 7a b7 46 ce fd b2 2d 7d f0 e2 25 f5 46 27 dc - 65 46 61 31 72 1b 90 af 44 53 63 a8 35 8b 9f 60 - 76 42 f7 8f ab 0a b0 f4 3b 71 68 d6 4b ae 70 d8 - 82 78 48 d8 ef 1e 42 1c 57 54 dd f4 2c 25 89 b5 - b3''', - # Salt - '''57 bf 16 0b cb 02 bb 1d c7 28 0c f0 45 85 30 b7 - d2 83 2f f7''', - SHA - ), - - # - # Example 8.1 to be found in - # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip - # - ( - # Private key - { - 'n':'''49 53 70 a1 fb 18 54 3c 16 d3 63 1e 31 63 25 5d - f6 2b e6 ee e8 90 d5 f2 55 09 e4 f7 78 a8 ea 6f - bb bc df 85 df f6 4e 0d 97 20 
03 ab 36 81 fb ba - 6d d4 1f d5 41 82 9b 2e 58 2d e9 f2 a4 a4 e0 a2 - d0 90 0b ef 47 53 db 3c ee 0e e0 6c 7d fa e8 b1 - d5 3b 59 53 21 8f 9c ce ea 69 5b 08 66 8e de aa - dc ed 94 63 b1 d7 90 d5 eb f2 7e 91 15 b4 6c ad - 4d 9a 2b 8e fa b0 56 1b 08 10 34 47 39 ad a0 73 - 3f''', - 'e':'''01 00 01''', - 'd':'''6c 66 ff e9 89 80 c3 8f cd ea b5 15 98 98 83 61 - 65 f4 b4 b8 17 c4 f6 a8 d4 86 ee 4e a9 13 0f e9 - b9 09 2b d1 36 d1 84 f9 5f 50 4a 60 7e ac 56 58 - 46 d2 fd d6 59 7a 89 67 c7 39 6e f9 5a 6e ee bb - 45 78 a6 43 96 6d ca 4d 8e e3 de 84 2d e6 32 79 - c6 18 15 9c 1a b5 4a 89 43 7b 6a 61 20 e4 93 0a - fb 52 a4 ba 6c ed 8a 49 47 ac 64 b3 0a 34 97 cb - e7 01 c2 d6 26 6d 51 72 19 ad 0e c6 d3 47 db e9''' - }, - # Message - '''81 33 2f 4b e6 29 48 41 5e a1 d8 99 79 2e ea cf - 6c 6e 1d b1 da 8b e1 3b 5c ea 41 db 2f ed 46 70 - 92 e1 ff 39 89 14 c7 14 25 97 75 f5 95 f8 54 7f - 73 56 92 a5 75 e6 92 3a f7 8f 22 c6 99 7d db 90 - fb 6f 72 d7 bb 0d d5 74 4a 31 de cd 3d c3 68 58 - 49 83 6e d3 4a ec 59 63 04 ad 11 84 3c 4f 88 48 - 9f 20 97 35 f5 fb 7f da f7 ce c8 ad dc 58 18 16 - 8f 88 0a cb f4 90 d5 10 05 b7 a8 e8 4e 43 e5 42 - 87 97 75 71 dd 99 ee a4 b1 61 eb 2d f1 f5 10 8f - 12 a4 14 2a 83 32 2e db 05 a7 54 87 a3 43 5c 9a - 78 ce 53 ed 93 bc 55 08 57 d7 a9 fb''', - # Signature - '''02 62 ac 25 4b fa 77 f3 c1 ac a2 2c 51 79 f8 f0 - 40 42 2b 3c 5b af d4 0a 8f 21 cf 0f a5 a6 67 cc - d5 99 3d 42 db af b4 09 c5 20 e2 5f ce 2b 1e e1 - e7 16 57 7f 1e fa 17 f3 da 28 05 2f 40 f0 41 9b - 23 10 6d 78 45 aa f0 11 25 b6 98 e7 a4 df e9 2d - 39 67 bb 00 c4 d0 d3 5b a3 55 2a b9 a8 b3 ee f0 - 7c 7f ec db c5 42 4a c4 db 1e 20 cb 37 d0 b2 74 - 47 69 94 0e a9 07 e1 7f bb ca 67 3b 20 52 23 80 - c5''', - # Salt - '''1d 65 49 1d 79 c8 64 b3 73 00 9b e6 f6 f2 46 7b - ac 4c 78 fa''', - SHA - ) - ) - - def testSign1(self): - for i in range(len(self._testData)): - # Build the key - comps = [ int(rws(self._testData[i][0][x]),16) for x in ('n','e','d') ] - key = MyKey(RSA.construct(comps)) - # Hash function - h = self._testData[i][4].new() - # Data to sign - h.update(t2b(self._testData[i][1])) - # Salt - test_salt = t2b(self._testData[i][3]) - key._randfunc = lambda N: test_salt - # The real test - signer = PKCS.new(key) - self.assertTrue(signer.can_sign()) - s = signer.sign(h) - self.assertEqual(s, t2b(self._testData[i][2])) - - def testVerify1(self): - for i in range(len(self._testData)): - # Build the key - comps = [ int(rws(self._testData[i][0][x]),16) for x in ('n','e') ] - key = MyKey(RSA.construct(comps)) - # Hash function - h = self._testData[i][4].new() - # Data to sign - h.update(t2b(self._testData[i][1])) - # Salt - test_salt = t2b(self._testData[i][3]) - # The real test - key._randfunc = lambda N: test_salt - verifier = PKCS.new(key) - self.assertFalse(verifier.can_sign()) - result = verifier.verify(h, t2b(self._testData[i][2])) - self.assertTrue(result) - - def testSignVerify(self): - h = SHA.new() - h.update(b('blah blah blah')) - - rng = Random.new().read - key = MyKey(RSA.generate(1024,rng)) - - # Helper function to monitor what's request from MGF - global mgfcalls - def newMGF(seed,maskLen): - global mgfcalls - mgfcalls += 1 - return bchr(0x00)*maskLen - - # Verify that PSS is friendly to all ciphers - for hashmod in (MD2,MD5,SHA,SHA224,SHA256,SHA384,RIPEMD): - h = hashmod.new() - h.update(b('blah blah blah')) - - # Verify that sign() asks for as many random bytes - # as the hash output size - key.asked = 0 - signer = PKCS.new(key) - s = signer.sign(h) - self.assertTrue(signer.verify(h, s)) - 
self.assertEqual(key.asked, h.digest_size) - - h = SHA.new() - h.update(b('blah blah blah')) - - # Verify that sign() uses a different salt length - for sLen in (0,3,21): - key.asked = 0 - signer = PKCS.new(key, saltLen=sLen) - s = signer.sign(h) - self.assertEqual(key.asked, sLen) - self.assertTrue(signer.verify(h, s)) - - # Verify that sign() uses the custom MGF - mgfcalls = 0 - signer = PKCS.new(key, newMGF) - s = signer.sign(h) - self.assertEqual(mgfcalls, 1) - self.assertTrue(signer.verify(h, s)) - - # Verify that sign() does not call the RNG - # when salt length is 0, even when a new MGF is provided - key.asked = 0 - mgfcalls = 0 - signer = PKCS.new(key, newMGF, 0) - s = signer.sign(h) - self.assertEqual(key.asked,0) - self.assertEqual(mgfcalls, 1) - self.assertTrue(signer.verify(h, s)) - -def get_tests(config={}): - tests = [] - tests += list_test_cases(PKCS1_PSS_Tests) - return tests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Util/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Util/__init__.py deleted file mode 100644 index abd640a..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Util/__init__.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Util/__init__.py: Self-test for utility modules -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test for utility modules""" - -__revision__ = "$Id$" - -import os - -def get_tests(config={}): - tests = [] - if os.name == 'nt': - from Crypto.SelfTest.Util import test_winrandom; tests += test_winrandom.get_tests(config=config) - from Crypto.SelfTest.Util import test_number; tests += test_number.get_tests(config=config) - from Crypto.SelfTest.Util import test_Counter; tests += test_Counter.get_tests(config=config) - return tests - -if __name__ == '__main__': - import unittest - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Util/test_Counter.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Util/test_Counter.py deleted file mode 100644 index 339ce60..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Util/test_Counter.py +++ /dev/null @@ -1,165 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Util/test_Counter: Self-test for the Crypto.Util.Counter module -# -# Written in 2009 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-tests for Crypto.Util.Counter""" - -__revision__ = "$Id$" - -import sys -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * -from Crypto.Util.py3compat import * - -import unittest - -class CounterTests(unittest.TestCase): - def setUp(self): - global Counter - from Crypto.Util import Counter - - def test_BE_shortcut(self): - """Big endian, shortcut enabled""" - c = Counter.new(128) - self.assertEqual(c.__PCT_CTR_SHORTCUT__,True) # assert_ - c = Counter.new(128, little_endian=False) - self.assertEqual(c.__PCT_CTR_SHORTCUT__,True) # assert_ - c = Counter.new(128, disable_shortcut=False) - self.assertEqual(c.__PCT_CTR_SHORTCUT__,True) # assert_ - c = Counter.new(128, little_endian=False, disable_shortcut=False) - self.assertEqual(c.__PCT_CTR_SHORTCUT__,True) # assert_ - - def test_LE_shortcut(self): - """Little endian, shortcut enabled""" - c = Counter.new(128, little_endian=True) - self.assertEqual(c.__PCT_CTR_SHORTCUT__,True) # assert_ - c = Counter.new(128, little_endian=True, disable_shortcut=False) - self.assertEqual(c.__PCT_CTR_SHORTCUT__,True) # assert_ - - def test_BE_no_shortcut(self): - """Big endian, shortcut disabled""" - c = Counter.new(128, disable_shortcut=True) - self.assertRaises(AttributeError, getattr, c, '__PCT_CTR_SHORTCUT__') - c = Counter.new(128, little_endian=False, disable_shortcut=True) - self.assertRaises(AttributeError, getattr, c, '__PCT_CTR_SHORTCUT__') - - def test_LE_no_shortcut(self): - """Little endian, shortcut disabled""" - c = Counter.new(128, little_endian=True, disable_shortcut=True) - self.assertRaises(AttributeError, getattr, c, '__PCT_CTR_SHORTCUT__') - - def test_BE_defaults(self): - """128-bit, Big endian, defaults""" - c = Counter.new(128) - self.assertEqual(1, c.next_value()) - self.assertEqual(b("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01"), c()) - self.assertEqual(2, c.next_value()) - self.assertEqual(b("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02"), c()) - for i in range(3, 256): - self.assertEqual(i, c.next_value()) - self.assertEqual(b("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00")+bchr(i), c()) - self.assertEqual(256, c.next_value()) - self.assertEqual(b("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00"), c()) - - def test_LE_defaults(self): - """128-bit, Little endian, defaults""" - c = Counter.new(128, little_endian=True) - self.assertEqual(1, c.next_value()) - self.assertEqual(b("\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), c()) - self.assertEqual(2, c.next_value()) - self.assertEqual(b("\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), c()) - for i in range(3, 256): - self.assertEqual(i, c.next_value()) - self.assertEqual(bchr(i)+b("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), c()) - self.assertEqual(256, c.next_value()) - self.assertEqual(b("\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), c()) - - def test_BE8_wraparound(self): - """8-bit, Big endian, wraparound""" - c = Counter.new(8) - for i in range(1, 256): - self.assertEqual(i, c.next_value()) - self.assertEqual(bchr(i), c()) - self.assertRaises(OverflowError, c.next_value) - self.assertRaises(OverflowError, c) - self.assertRaises(OverflowError, c.next_value) - self.assertRaises(OverflowError, c) - - def test_LE8_wraparound(self): - """8-bit, Little endian, wraparound""" - c = Counter.new(8, 
little_endian=True) - for i in range(1, 256): - self.assertEqual(i, c.next_value()) - self.assertEqual(bchr(i), c()) - self.assertRaises(OverflowError, c.next_value) - self.assertRaises(OverflowError, c) - self.assertRaises(OverflowError, c.next_value) - self.assertRaises(OverflowError, c) - - def test_BE8_wraparound_allowed(self): - """8-bit, Big endian, wraparound with allow_wraparound=True""" - c = Counter.new(8, allow_wraparound=True) - for i in range(1, 256): - self.assertEqual(i, c.next_value()) - self.assertEqual(bchr(i), c()) - self.assertEqual(0, c.next_value()) - self.assertEqual(b("\x00"), c()) - self.assertEqual(1, c.next_value()) - - def test_LE8_wraparound_allowed(self): - """8-bit, Little endian, wraparound with allow_wraparound=True""" - c = Counter.new(8, little_endian=True, allow_wraparound=True) - for i in range(1, 256): - self.assertEqual(i, c.next_value()) - self.assertEqual(bchr(i), c()) - self.assertEqual(0, c.next_value()) - self.assertEqual(b("\x00"), c()) - self.assertEqual(1, c.next_value()) - - def test_BE8_carry(self): - """8-bit, Big endian, carry attribute""" - c = Counter.new(8) - for i in range(1, 256): - self.assertEqual(0, c.carry) - self.assertEqual(i, c.next_value()) - self.assertEqual(bchr(i), c()) - self.assertEqual(1, c.carry) - - def test_LE8_carry(self): - """8-bit, Little endian, carry attribute""" - c = Counter.new(8, little_endian=True) - for i in range(1, 256): - self.assertEqual(0, c.carry) - self.assertEqual(i, c.next_value()) - self.assertEqual(bchr(i), c()) - self.assertEqual(1, c.carry) - -def get_tests(config={}): - from Crypto.SelfTest.st_common import list_test_cases - return list_test_cases(CounterTests) - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Util/test_asn1.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Util/test_asn1.py deleted file mode 100644 index fff9286..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Util/test_asn1.py +++ /dev/null @@ -1,293 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Util/test_asn.py: Self-test for the Crypto.Util.asn1 module -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-tests for Crypto.Util.asn1""" - -__revision__ = "$Id$" - -import unittest -import sys - -from Crypto.Util.py3compat import * -from Crypto.Util.asn1 import DerSequence, DerObject - -class DerObjectTests(unittest.TestCase): - - def testObjEncode1(self): - # No payload - der = DerObject(b('\x33')) - self.assertEqual(der.encode(), b('\x33\x00')) - # Small payload - der.payload = b('\x45') - self.assertEqual(der.encode(), b('\x33\x01\x45')) - # Invariant - self.assertEqual(der.encode(), b('\x33\x01\x45')) - # Initialize with numerical tag - der = DerObject(b(0x33)) - der.payload = b('\x45') - self.assertEqual(der.encode(), b('\x33\x01\x45')) - - def testObjEncode2(self): - # Known types - der = DerObject('SEQUENCE') - self.assertEqual(der.encode(), b('\x30\x00')) - der = DerObject('BIT STRING') - self.assertEqual(der.encode(), b('\x03\x00')) - - def testObjEncode3(self): - # Long payload - der = DerObject(b('\x34')) - der.payload = b("0")*128 - self.assertEqual(der.encode(), b('\x34\x81\x80' + "0"*128)) - - def testObjDecode1(self): - # Decode short payload - der = DerObject() - der.decode(b('\x20\x02\x01\x02')) - self.assertEqual(der.payload, b("\x01\x02")) - self.assertEqual(der.typeTag, 0x20) - - def testObjDecode2(self): - # Decode short payload - der = DerObject() - der.decode(b('\x22\x81\x80' + "1"*128)) - self.assertEqual(der.payload, b("1")*128) - self.assertEqual(der.typeTag, 0x22) - -class DerSequenceTests(unittest.TestCase): - - def testEncode1(self): - # Empty sequence - der = DerSequence() - self.assertEqual(der.encode(), b('0\x00')) - self.assertFalse(der.hasOnlyInts()) - # One single-byte integer (zero) - der.append(0) - self.assertEqual(der.encode(), b('0\x03\x02\x01\x00')) - self.assertTrue(der.hasOnlyInts()) - # Invariant - self.assertEqual(der.encode(), b('0\x03\x02\x01\x00')) - - def testEncode2(self): - # One single-byte integer (non-zero) - der = DerSequence() - der.append(127) - self.assertEqual(der.encode(), b('0\x03\x02\x01\x7f')) - # Indexing - der[0] = 1 - self.assertEqual(len(der),1) - self.assertEqual(der[0],1) - self.assertEqual(der[-1],1) - self.assertEqual(der.encode(), b('0\x03\x02\x01\x01')) - # - der[:] = [1] - self.assertEqual(len(der),1) - self.assertEqual(der[0],1) - self.assertEqual(der.encode(), b('0\x03\x02\x01\x01')) - - def testEncode3(self): - # One multi-byte integer (non-zero) - der = DerSequence() - der.append(0x180) - self.assertEqual(der.encode(), b('0\x04\x02\x02\x01\x80')) - - def testEncode4(self): - # One very long integer - der = DerSequence() - der.append(2**2048) - self.assertEqual(der.encode(), b('0\x82\x01\x05')+ - b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - 
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00')) - - def testEncode5(self): - # One single-byte integer (looks negative) - der = DerSequence() - der.append(0xFF) - self.assertEqual(der.encode(), b('0\x04\x02\x02\x00\xff')) - - def testEncode6(self): - # Two integers - der = DerSequence() - der.append(0x180) - der.append(0xFF) - self.assertEqual(der.encode(), b('0\x08\x02\x02\x01\x80\x02\x02\x00\xff')) - self.assertTrue(der.hasOnlyInts()) - # - der.append(0x01) - der[1:] = [9,8] - self.assertEqual(len(der),3) - self.assertEqual(der[1:],[9,8]) - self.assertEqual(der[1:-1],[9]) - self.assertEqual(der.encode(), b('0\x0A\x02\x02\x01\x80\x02\x01\x09\x02\x01\x08')) - - def testEncode6(self): - # One integer and another type (no matter what it is) - der = DerSequence() - der.append(0x180) - der.append(b('\x00\x02\x00\x00')) - self.assertEqual(der.encode(), b('0\x08\x02\x02\x01\x80\x00\x02\x00\x00')) - self.assertFalse(der.hasOnlyInts()) - - #### - - def testDecode1(self): - # Empty sequence - der = DerSequence() - der.decode(b('0\x00')) - self.assertEqual(len(der),0) - # One single-byte integer (zero) - der.decode(b('0\x03\x02\x01\x00')) - self.assertEqual(len(der),1) - self.assertEqual(der[0],0) - # Invariant - der.decode(b('0\x03\x02\x01\x00')) - self.assertEqual(len(der),1) - self.assertEqual(der[0],0) - - def testDecode2(self): - # One single-byte integer (non-zero) - der = DerSequence() - der.decode(b('0\x03\x02\x01\x7f')) - self.assertEqual(len(der),1) - self.assertEqual(der[0],127) - - def testDecode3(self): - # One multi-byte integer (non-zero) - der = DerSequence() - der.decode(b('0\x04\x02\x02\x01\x80')) - self.assertEqual(len(der),1) - self.assertEqual(der[0],0x180) - - def testDecode4(self): - # One very long integer - der = DerSequence() - der.decode(b('0\x82\x01\x05')+ - b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ - b('\x00\x00\x00\x00\x00\x00\x00\x00\x00')) - self.assertEqual(len(der),1) - self.assertEqual(der[0],2**2048) - - def testDecode5(self): - # One single-byte integer (looks negative) - der = DerSequence() - der.decode(b('0\x04\x02\x02\x00\xff')) - 
self.assertEqual(len(der),1) - self.assertEqual(der[0],0xFF) - - def testDecode6(self): - # Two integers - der = DerSequence() - der.decode(b('0\x08\x02\x02\x01\x80\x02\x02\x00\xff')) - self.assertEqual(len(der),2) - self.assertEqual(der[0],0x180) - self.assertEqual(der[1],0xFF) - - def testDecode7(self): - # One integer and 2 other types - der = DerSequence() - der.decode(b('0\x0A\x02\x02\x01\x80\x24\x02\xb6\x63\x12\x00')) - self.assertEqual(len(der),3) - self.assertEqual(der[0],0x180) - self.assertEqual(der[1],b('\x24\x02\xb6\x63')) - self.assertEqual(der[2],b('\x12\x00')) - - def testDecode8(self): - # Only 2 other types - der = DerSequence() - der.decode(b('0\x06\x24\x02\xb6\x63\x12\x00')) - self.assertEqual(len(der),2) - self.assertEqual(der[0],b('\x24\x02\xb6\x63')) - self.assertEqual(der[1],b('\x12\x00')) - - def testErrDecode1(self): - # Not a sequence - der = DerSequence() - self.assertRaises(ValueError, der.decode, b('')) - self.assertRaises(ValueError, der.decode, b('\x00')) - self.assertRaises(ValueError, der.decode, b('\x30')) - - def testErrDecode2(self): - # Wrong payload type - der = DerSequence() - self.assertRaises(ValueError, der.decode, b('\x30\x00\x00'), True) - - def testErrDecode3(self): - # Wrong length format - der = DerSequence() - self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x01\x01\x00')) - self.assertRaises(ValueError, der.decode, b('\x30\x81\x03\x02\x01\x01')) - self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x81\x01\x01')) - - def testErrDecode4(self): - # Wrong integer format - der = DerSequence() - # Multi-byte encoding for zero - #self.assertRaises(ValueError, der.decode, '\x30\x04\x02\x02\x00\x00') - # Negative integer - self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x01\xFF')) - -def get_tests(config={}): - from Crypto.SelfTest.st_common import list_test_cases - listTests = [] - listTests += list_test_cases(DerObjectTests) - listTests += list_test_cases(DerSequenceTests) - return listTests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Util/test_number.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Util/test_number.py deleted file mode 100644 index d7d3024..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Util/test_number.py +++ /dev/null @@ -1,295 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Util/test_number.py: Self-test for parts of the Crypto.Util.number module -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self-tests for (some of) Crypto.Util.number""" - -__revision__ = "$Id$" - -import sys -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * - -import unittest - -# NB: In some places, we compare tuples instead of just output values so that -# if any inputs cause a test failure, we'll be able to tell which ones. - -class MiscTests(unittest.TestCase): - def setUp(self): - global number, math - from Crypto.Util import number - import math - - def test_ceil_shift(self): - """Util.number.ceil_shift""" - self.assertRaises(AssertionError, number.ceil_shift, -1, 1) - self.assertRaises(AssertionError, number.ceil_shift, 1, -1) - - # b = 0 - self.assertEqual(0, number.ceil_shift(0, 0)) - self.assertEqual(1, number.ceil_shift(1, 0)) - self.assertEqual(2, number.ceil_shift(2, 0)) - self.assertEqual(3, number.ceil_shift(3, 0)) - - # b = 1 - self.assertEqual(0, number.ceil_shift(0, 1)) - self.assertEqual(1, number.ceil_shift(1, 1)) - self.assertEqual(1, number.ceil_shift(2, 1)) - self.assertEqual(2, number.ceil_shift(3, 1)) - - # b = 2 - self.assertEqual(0, number.ceil_shift(0, 2)) - self.assertEqual(1, number.ceil_shift(1, 2)) - self.assertEqual(1, number.ceil_shift(2, 2)) - self.assertEqual(1, number.ceil_shift(3, 2)) - self.assertEqual(1, number.ceil_shift(4, 2)) - self.assertEqual(2, number.ceil_shift(5, 2)) - self.assertEqual(2, number.ceil_shift(6, 2)) - self.assertEqual(2, number.ceil_shift(7, 2)) - self.assertEqual(2, number.ceil_shift(8, 2)) - self.assertEqual(3, number.ceil_shift(9, 2)) - - for b in range(3, 1+129, 3): # 3, 6, ... 
, 129 - self.assertEqual(0, number.ceil_shift(0, b)) - - n = 1 - while n <= 2**(b+2): - (q, r) = divmod(n-1, 2**b) - expected = q + int(not not r) - self.assertEqual((n-1, b, expected), - (n-1, b, number.ceil_shift(n-1, b))) - - (q, r) = divmod(n, 2**b) - expected = q + int(not not r) - self.assertEqual((n, b, expected), - (n, b, number.ceil_shift(n, b))) - - (q, r) = divmod(n+1, 2**b) - expected = q + int(not not r) - self.assertEqual((n+1, b, expected), - (n+1, b, number.ceil_shift(n+1, b))) - - n *= 2 - - def test_ceil_div(self): - """Util.number.ceil_div""" - self.assertRaises(TypeError, number.ceil_div, "1", 1) - self.assertRaises(ZeroDivisionError, number.ceil_div, 1, 0) - self.assertRaises(ZeroDivisionError, number.ceil_div, -1, 0) - - # b = -1 - self.assertEqual(0, number.ceil_div(0, -1)) - self.assertEqual(-1, number.ceil_div(1, -1)) - self.assertEqual(-2, number.ceil_div(2, -1)) - self.assertEqual(-3, number.ceil_div(3, -1)) - - # b = 1 - self.assertEqual(0, number.ceil_div(0, 1)) - self.assertEqual(1, number.ceil_div(1, 1)) - self.assertEqual(2, number.ceil_div(2, 1)) - self.assertEqual(3, number.ceil_div(3, 1)) - - # b = 2 - self.assertEqual(0, number.ceil_div(0, 2)) - self.assertEqual(1, number.ceil_div(1, 2)) - self.assertEqual(1, number.ceil_div(2, 2)) - self.assertEqual(2, number.ceil_div(3, 2)) - self.assertEqual(2, number.ceil_div(4, 2)) - self.assertEqual(3, number.ceil_div(5, 2)) - - # b = 3 - self.assertEqual(0, number.ceil_div(0, 3)) - self.assertEqual(1, number.ceil_div(1, 3)) - self.assertEqual(1, number.ceil_div(2, 3)) - self.assertEqual(1, number.ceil_div(3, 3)) - self.assertEqual(2, number.ceil_div(4, 3)) - self.assertEqual(2, number.ceil_div(5, 3)) - self.assertEqual(2, number.ceil_div(6, 3)) - self.assertEqual(3, number.ceil_div(7, 3)) - - # b = 4 - self.assertEqual(0, number.ceil_div(0, 4)) - self.assertEqual(1, number.ceil_div(1, 4)) - self.assertEqual(1, number.ceil_div(2, 4)) - self.assertEqual(1, number.ceil_div(3, 4)) - self.assertEqual(1, number.ceil_div(4, 4)) - self.assertEqual(2, number.ceil_div(5, 4)) - self.assertEqual(2, number.ceil_div(6, 4)) - self.assertEqual(2, number.ceil_div(7, 4)) - self.assertEqual(2, number.ceil_div(8, 4)) - self.assertEqual(3, number.ceil_div(9, 4)) - - # b = -4 - self.assertEqual(3, number.ceil_div(-9, -4)) - self.assertEqual(2, number.ceil_div(-8, -4)) - self.assertEqual(2, number.ceil_div(-7, -4)) - self.assertEqual(2, number.ceil_div(-6, -4)) - self.assertEqual(2, number.ceil_div(-5, -4)) - self.assertEqual(1, number.ceil_div(-4, -4)) - self.assertEqual(1, number.ceil_div(-3, -4)) - self.assertEqual(1, number.ceil_div(-2, -4)) - self.assertEqual(1, number.ceil_div(-1, -4)) - self.assertEqual(0, number.ceil_div(0, -4)) - self.assertEqual(0, number.ceil_div(1, -4)) - self.assertEqual(0, number.ceil_div(2, -4)) - self.assertEqual(0, number.ceil_div(3, -4)) - self.assertEqual(-1, number.ceil_div(4, -4)) - self.assertEqual(-1, number.ceil_div(5, -4)) - self.assertEqual(-1, number.ceil_div(6, -4)) - self.assertEqual(-1, number.ceil_div(7, -4)) - self.assertEqual(-2, number.ceil_div(8, -4)) - self.assertEqual(-2, number.ceil_div(9, -4)) - - def test_exact_log2(self): - """Util.number.exact_log2""" - self.assertRaises(TypeError, number.exact_log2, "0") - self.assertRaises(ValueError, number.exact_log2, -1) - self.assertRaises(ValueError, number.exact_log2, 0) - self.assertEqual(0, number.exact_log2(1)) - self.assertEqual(1, number.exact_log2(2)) - self.assertRaises(ValueError, number.exact_log2, 3) - self.assertEqual(2, 
number.exact_log2(4)) - self.assertRaises(ValueError, number.exact_log2, 5) - self.assertRaises(ValueError, number.exact_log2, 6) - self.assertRaises(ValueError, number.exact_log2, 7) - e = 3 - n = 8 - while e < 16: - if n == 2**e: - self.assertEqual(e, number.exact_log2(n), "expected=2**%d, n=%d" % (e, n)) - e += 1 - else: - self.assertRaises(ValueError, number.exact_log2, n) - n += 1 - - for e in range(16, 1+64, 2): - self.assertRaises(ValueError, number.exact_log2, 2**e-1) - self.assertEqual(e, number.exact_log2(2**e)) - self.assertRaises(ValueError, number.exact_log2, 2**e+1) - - def test_exact_div(self): - """Util.number.exact_div""" - - # Positive numbers - self.assertEqual(1, number.exact_div(1, 1)) - self.assertRaises(ValueError, number.exact_div, 1, 2) - self.assertEqual(1, number.exact_div(2, 2)) - self.assertRaises(ValueError, number.exact_div, 3, 2) - self.assertEqual(2, number.exact_div(4, 2)) - - # Negative numbers - self.assertEqual(-1, number.exact_div(-1, 1)) - self.assertEqual(-1, number.exact_div(1, -1)) - self.assertRaises(ValueError, number.exact_div, -1, 2) - self.assertEqual(1, number.exact_div(-2, -2)) - self.assertEqual(-2, number.exact_div(-4, 2)) - - # Zero dividend - self.assertEqual(0, number.exact_div(0, 1)) - self.assertEqual(0, number.exact_div(0, 2)) - - # Zero divisor (allow_divzero == False) - self.assertRaises(ZeroDivisionError, number.exact_div, 0, 0) - self.assertRaises(ZeroDivisionError, number.exact_div, 1, 0) - - # Zero divisor (allow_divzero == True) - self.assertEqual(0, number.exact_div(0, 0, allow_divzero=True)) - self.assertRaises(ValueError, number.exact_div, 1, 0, allow_divzero=True) - - def test_floor_div(self): - """Util.number.floor_div""" - self.assertRaises(TypeError, number.floor_div, "1", 1) - for a in range(-10, 10): - for b in range(-10, 10): - if b == 0: - self.assertRaises(ZeroDivisionError, number.floor_div, a, b) - else: - self.assertEqual((a, b, int(math.floor(float(a) / b))), - (a, b, number.floor_div(a, b))) - - def test_getStrongPrime(self): - """Util.number.getStrongPrime""" - self.assertRaises(ValueError, number.getStrongPrime, 256) - self.assertRaises(ValueError, number.getStrongPrime, 513) - bits = 512 - x = number.getStrongPrime(bits) - self.assertNotEqual(x % 2, 0) - self.assertEqual(x > (1 << bits-1)-1, 1) - self.assertEqual(x < (1 << bits), 1) - e = 2**16+1 - x = number.getStrongPrime(bits, e) - self.assertEqual(number.GCD(x-1, e), 1) - self.assertNotEqual(x % 2, 0) - self.assertEqual(x > (1 << bits-1)-1, 1) - self.assertEqual(x < (1 << bits), 1) - e = 2**16+2 - x = number.getStrongPrime(bits, e) - self.assertEqual(number.GCD((x-1)>>1, e), 1) - self.assertNotEqual(x % 2, 0) - self.assertEqual(x > (1 << bits-1)-1, 1) - self.assertEqual(x < (1 << bits), 1) - - def test_isPrime(self): - """Util.number.isPrime""" - self.assertEqual(number.isPrime(-3), False) # Regression test: negative numbers should not be prime - self.assertEqual(number.isPrime(-2), False) # Regression test: negative numbers should not be prime - self.assertEqual(number.isPrime(1), False) # Regression test: isPrime(1) caused some versions of PyCrypto to crash. 
- self.assertEqual(number.isPrime(2), True) - self.assertEqual(number.isPrime(3), True) - self.assertEqual(number.isPrime(4), False) - self.assertEqual(number.isPrime(2**1279-1), True) - self.assertEqual(number.isPrime(-(2**1279-1)), False) # Regression test: negative numbers should not be prime - # test some known gmp pseudo-primes taken from - # http://www.trnicely.net/misc/mpzspsp.html - for composite in (43 * 127 * 211, 61 * 151 * 211, 15259 * 30517, - 346141 * 692281, 1007119 * 2014237, 3589477 * 7178953, - 4859419 * 9718837, 2730439 * 5460877, - 245127919 * 490255837, 963939391 * 1927878781, - 4186358431 * 8372716861, 1576820467 * 3153640933): - self.assertEqual(number.isPrime(int(composite)), False) - - def test_size(self): - self.assertEqual(number.size(2),2) - self.assertEqual(number.size(3),2) - self.assertEqual(number.size(0xa2),8) - self.assertEqual(number.size(0xa2ba40),8*3) - self.assertEqual(number.size(0xa2ba40ee07e3b2bd2f02ce227f36a195024486e49c19cb41bbbdfbba98b22b0e577c2eeaffa20d883a76e65e394c69d4b3c05a1e8fadda27edb2a42bc000fe888b9b32c22d15add0cd76b3e7936e19955b220dd17d4ea904b1ec102b2e4de7751222aa99151024c7cb41cc5ea21d00eeb41f7c800834d2c6e06bce3bce7ea9a5), 1024) - - def test_negative_number_roundtrip_mpzToLongObj_longObjToMPZ(self): - """Test that mpzToLongObj and longObjToMPZ (internal functions) roundtrip negative numbers correctly.""" - n = -100000000000000000000000000000000000 - e = 2 - k = number._fastmath.rsa_construct(n, e) - self.assertEqual(n, k.n) - self.assertEqual(e, k.e) - -def get_tests(config={}): - from Crypto.SelfTest.st_common import list_test_cases - return list_test_cases(MiscTests) - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Util/test_winrandom.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Util/test_winrandom.py deleted file mode 100644 index 3fc5145..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/Util/test_winrandom.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/Util/test_winrandom.py: Self-test for the winrandom module -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-# =================================================================== - -"""Self-test suite for Crypto.Util.winrandom""" - -__revision__ = "$Id$" - -import unittest - -class WinRandomImportTest(unittest.TestCase): - def runTest(self): - """winrandom: simple test""" - # Import the winrandom module and try to use it - from Crypto.Util import winrandom - randobj = winrandom.new() - x = randobj.get_bytes(16) - y = randobj.get_bytes(16) - self.assertNotEqual(x, y) - -def get_tests(config={}): - return [WinRandomImportTest()] - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/__init__.py deleted file mode 100644 index c3281eb..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/__init__.py +++ /dev/null @@ -1,92 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/__init__.py: Self-test for PyCrypto -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Self tests - -These tests should perform quickly and can ideally be used every time an -application runs. -""" - -__revision__ = "$Id$" - -import sys -import unittest -from io import StringIO - -class SelfTestError(Exception): - def __init__(self, message, result): - Exception.__init__(self, message, result) - self.message = message - self.result = result - -def run(module=None, verbosity=0, stream=None, tests=None, config=None, **kwargs): - """Execute self-tests. - - This raises SelfTestError if any test is unsuccessful. - - You may optionally pass in a sub-module of SelfTest if you only want to - perform some of the tests. 
For example, the following would test only the - hash modules: - - Crypto.SelfTest.run(Crypto.SelfTest.Hash) - - """ - if config is None: - config = {} - suite = unittest.TestSuite() - if module is None: - if tests is None: - tests = get_tests(config=config) - suite.addTests(tests) - else: - if tests is None: - suite.addTests(module.get_tests(config=config)) - else: - raise ValueError("'module' and 'tests' arguments are mutually exclusive") - if stream is None: - kwargs['stream'] = StringIO() - runner = unittest.TextTestRunner(verbosity=verbosity, **kwargs) - result = runner.run(suite) - if not result.wasSuccessful(): - if stream is None: - sys.stderr.write(stream.getvalue()) - raise SelfTestError("Self-test failed", result) - return result - -def get_tests(config={}): - tests = [] - from Crypto.SelfTest import Cipher; tests += Cipher.get_tests(config=config) - from Crypto.SelfTest import Hash; tests += Hash.get_tests(config=config) - from Crypto.SelfTest import Protocol; tests += Protocol.get_tests(config=config) - from Crypto.SelfTest import PublicKey; tests += PublicKey.get_tests(config=config) - from Crypto.SelfTest import Random; tests += Random.get_tests(config=config) - from Crypto.SelfTest import Util; tests += Util.get_tests(config=config) - from Crypto.SelfTest import Signature; tests += Signature.get_tests(config=config) - return tests - -if __name__ == '__main__': - suite = lambda: unittest.TestSuite(get_tests()) - unittest.main(defaultTest='suite') - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/st_common.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/st_common.py deleted file mode 100644 index c56eac5..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/SelfTest/st_common.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SelfTest/st_common.py: Common functions for SelfTest modules -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Common functions for SelfTest modules""" - -__revision__ = "$Id$" - -import unittest -import binascii -import sys -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * -from Crypto.Util.py3compat import * - -class _list_testloader(unittest.TestLoader): - suiteClass = list - -def list_test_cases(class_): - """Return a list of TestCase instances given a TestCase class - - This is useful when you have defined test* methods on your TestCase class. 
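A minimal usage sketch for the two helpers above, assuming an installed PyCrypto; the choice of the Hash sub-package, the verbosity level, and the throwaway TestCase are illustrative only:

    import unittest
    import Crypto.SelfTest
    from Crypto.SelfTest import Hash
    from Crypto.SelfTest.st_common import list_test_cases

    # Run only the hash self-tests; run() raises SelfTestError if anything fails.
    Crypto.SelfTest.run(module=Hash, verbosity=2)

    # list_test_cases() flattens a TestCase class into a list of test instances.
    class ExampleTests(unittest.TestCase):
        def test_trivial(self):
            self.assertTrue(True)

    suite = unittest.TestSuite(list_test_cases(ExampleTests))
    unittest.TextTestRunner().run(suite)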
- """ - return _list_testloader().loadTestsFromTestCase(class_) - -def strip_whitespace(s): - """Remove whitespace from a text or byte string""" - if isinstance(s,str): - return b("".join(s.split())) - else: - return b("").join(s.split()) - -def a2b_hex(s): - """Convert hexadecimal to binary, ignoring whitespace""" - return binascii.a2b_hex(strip_whitespace(s)) - -def b2a_hex(s): - """Convert binary to hexadecimal""" - # For completeness - return binascii.b2a_hex(s) - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Signature/PKCS1_PSS.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Signature/PKCS1_PSS.py deleted file mode 100644 index 319851e..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Signature/PKCS1_PSS.py +++ /dev/null @@ -1,355 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Signature/PKCS1_PSS.py : PKCS#1 PPS -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""RSA digital signature protocol with appendix according to PKCS#1 PSS. - -See RFC3447__ or the `original RSA Labs specification`__. - -This scheme is more properly called ``RSASSA-PSS``. - -For example, a sender may authenticate a message using SHA-1 and PSS like -this: - - >>> from Crypto.Signature import PKCS1_PSS - >>> from Crypto.Hash import SHA - >>> from Crypto.PublicKey import RSA - >>> from Crypto import Random - >>> - >>> message = 'To be signed' - >>> key = RSA.importKey(open('privkey.der').read()) - >>> h = SHA.new() - >>> h.update(message) - >>> signer = PKCS1_PSS.new(key) - >>> signature = PKCS1_PSS.sign(key) - -At the receiver side, verification can be done like using the public part of -the RSA key: - - >>> key = RSA.importKey(open('pubkey.der').read()) - >>> h = SHA.new() - >>> h.update(message) - >>> verifier = PKCS1_PSS.new(key) - >>> if verifier.verify(h, signature): - >>> print "The signature is authentic." - >>> else: - >>> print "The signature is not authentic." - -:undocumented: __revision__, __package__ - -.. __: http://www.ietf.org/rfc/rfc3447.txt -.. 
__: http://www.rsa.com/rsalabs/node.asp?id=2125 -""" - -# Allow nested scopes in Python 2.1 -# See http://oreilly.com/pub/a/python/2001/04/19/pythonnews.html - - -__revision__ = "$Id$" -__all__ = [ 'new', 'PSS_SigScheme' ] - -from Crypto.Util.py3compat import * -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * -import Crypto.Util.number -from Crypto.Util.number import ceil_shift, ceil_div, long_to_bytes -from Crypto.Util.strxor import strxor - -class PSS_SigScheme: - """This signature scheme can perform PKCS#1 PSS RSA signature or verification.""" - - def __init__(self, key, mgfunc, saltLen): - """Initialize this PKCS#1 PSS signature scheme object. - - :Parameters: - key : an RSA key object - If a private half is given, both signature and verification are possible. - If a public half is given, only verification is possible. - mgfunc : callable - A mask generation function that accepts two parameters: a string to - use as seed, and the lenth of the mask to generate, in bytes. - saltLen : int - Length of the salt, in bytes. - """ - self._key = key - self._saltLen = saltLen - self._mgfunc = mgfunc - - def can_sign(self): - """Return True if this cipher object can be used for signing messages.""" - return self._key.has_private() - - def sign(self, mhash): - """Produce the PKCS#1 PSS signature of a message. - - This function is named ``RSASSA-PSS-SIGN``, and is specified in - section 8.1.1 of RFC3447. - - :Parameters: - mhash : hash object - The hash that was carried out over the message. This is an object - belonging to the `Crypto.Hash` module. - - :Return: The PSS signature encoded as a string. - :Raise ValueError: - If the RSA key length is not sufficiently long to deal with the given - hash algorithm. - :Raise TypeError: - If the RSA key has no private half. - - :attention: Modify the salt length and the mask generation function only - if you know what you are doing. - The receiver must use the same parameters too. - """ - # TODO: Verify the key is RSA - - randfunc = self._key._randfunc - - # Set defaults for salt length and mask generation function - if self._saltLen == None: - sLen = mhash.digest_size - else: - sLen = self._saltLen - if self._mgfunc: - mgf = self._mgfunc - else: - mgf = lambda x,y: MGF1(x,y,mhash) - - modBits = Crypto.Util.number.size(self._key.n) - - # See 8.1.1 in RFC3447 - k = ceil_div(modBits,8) # Convert from bits to bytes - # Step 1 - em = EMSA_PSS_ENCODE(mhash, modBits-1, randfunc, mgf, sLen) - # Step 2a (OS2IP) and 2b (RSASP1) - m = self._key.decrypt(em) - # Step 2c (I2OSP) - S = bchr(0x00)*(k-len(m)) + m - return S - - def verify(self, mhash, S): - """Verify that a certain PKCS#1 PSS signature is authentic. - - This function checks if the party holding the private half of the given - RSA key has really signed the message. - - This function is called ``RSASSA-PSS-VERIFY``, and is specified in section - 8.1.2 of RFC3447. - - :Parameters: - mhash : hash object - The hash that was carried out over the message. This is an object - belonging to the `Crypto.Hash` module. - S : string - The signature that needs to be validated. - - :Return: True if verification is correct. False otherwise. 
- """ - # TODO: Verify the key is RSA - - # Set defaults for salt length and mask generation function - if self._saltLen == None: - sLen = mhash.digest_size - else: - sLen = self._saltLen - if self._mgfunc: - mgf = self._mgfunc - else: - mgf = lambda x,y: MGF1(x,y,mhash) - - modBits = Crypto.Util.number.size(self._key.n) - - # See 8.1.2 in RFC3447 - k = ceil_div(modBits,8) # Convert from bits to bytes - # Step 1 - if len(S) != k: - return False - # Step 2a (O2SIP), 2b (RSAVP1), and partially 2c (I2OSP) - # Note that signature must be smaller than the module - # but RSA.py won't complain about it. - # TODO: Fix RSA object; don't do it here. - em = self._key.encrypt(S, 0)[0] - # Step 2c - emLen = ceil_div(modBits-1,8) - em = bchr(0x00)*(emLen-len(em)) + em - # Step 3 - try: - result = EMSA_PSS_VERIFY(mhash, em, modBits-1, mgf, sLen) - except ValueError: - return False - # Step 4 - return result - -def MGF1(mgfSeed, maskLen, hash): - """Mask Generation Function, described in B.2.1""" - T = b("") - for counter in range(ceil_div(maskLen, hash.digest_size)): - c = long_to_bytes(counter, 4) - T = T + hash.new(mgfSeed + c).digest() - assert(len(T)>=maskLen) - return T[:maskLen] - -def EMSA_PSS_ENCODE(mhash, emBits, randFunc, mgf, sLen): - """ - Implement the ``EMSA-PSS-ENCODE`` function, as defined - in PKCS#1 v2.1 (RFC3447, 9.1.1). - - The original ``EMSA-PSS-ENCODE`` actually accepts the message ``M`` as input, - and hash it internally. Here, we expect that the message has already - been hashed instead. - - :Parameters: - mhash : hash object - The hash object that holds the digest of the message being signed. - emBits : int - Maximum length of the final encoding, in bits. - randFunc : callable - An RNG function that accepts as only parameter an int, and returns - a string of random bytes, to be used as salt. - mgf : callable - A mask generation function that accepts two parameters: a string to - use as seed, and the lenth of the mask to generate, in bytes. - sLen : int - Length of the salt, in bytes. - - :Return: An ``emLen`` byte long string that encodes the hash - (with ``emLen = \ceil(emBits/8)``). - - :Raise ValueError: - When digest or salt length are too big. - """ - - emLen = ceil_div(emBits,8) - - # Bitmask of digits that fill up - lmask = 0 - for i in range(8*emLen-emBits): - lmask = lmask>>1 | 0x80 - - # Step 1 and 2 have been already done - # Step 3 - if emLen < mhash.digest_size+sLen+2: - raise ValueError("Digest or salt length are too long for given key size.") - # Step 4 - salt = b("") - if randFunc and sLen>0: - salt = randFunc(sLen) - # Step 5 and 6 - h = mhash.new(bchr(0x00)*8 + mhash.digest() + salt) - # Step 7 and 8 - db = bchr(0x00)*(emLen-sLen-mhash.digest_size-2) + bchr(0x01) + salt - # Step 9 - dbMask = mgf(h.digest(), emLen-mhash.digest_size-1) - # Step 10 - maskedDB = strxor(db,dbMask) - # Step 11 - maskedDB = bchr(bord(maskedDB[0]) & ~lmask) + maskedDB[1:] - # Step 12 - em = maskedDB + h.digest() + bchr(0xBC) - return em - -def EMSA_PSS_VERIFY(mhash, em, emBits, mgf, sLen): - """ - Implement the ``EMSA-PSS-VERIFY`` function, as defined - in PKCS#1 v2.1 (RFC3447, 9.1.2). - - ``EMSA-PSS-VERIFY`` actually accepts the message ``M`` as input, - and hash it internally. Here, we expect that the message has already - been hashed instead. - - :Parameters: - mhash : hash object - The hash object that holds the digest of the message to be verified. - em : string - The signature to verify, therefore proving that the sender really signed - the message that was received. 
- emBits : int - Length of the final encoding (em), in bits. - mgf : callable - A mask generation function that accepts two parameters: a string to - use as seed, and the lenth of the mask to generate, in bytes. - sLen : int - Length of the salt, in bytes. - - :Return: 0 if the encoding is consistent, 1 if it is inconsistent. - - :Raise ValueError: - When digest or salt length are too big. - """ - - emLen = ceil_div(emBits,8) - - # Bitmask of digits that fill up - lmask = 0 - for i in range(8*emLen-emBits): - lmask = lmask>>1 | 0x80 - - # Step 1 and 2 have been already done - # Step 3 - if emLen < mhash.digest_size+sLen+2: - return False - # Step 4 - if ord(em[-1:])!=0xBC: - return False - # Step 5 - maskedDB = em[:emLen-mhash.digest_size-1] - h = em[emLen-mhash.digest_size-1:-1] - # Step 6 - if lmask & bord(em[0]): - return False - # Step 7 - dbMask = mgf(h, emLen-mhash.digest_size-1) - # Step 8 - db = strxor(maskedDB, dbMask) - # Step 9 - db = bchr(bord(db[0]) & ~lmask) + db[1:] - # Step 10 - if not db.startswith(bchr(0x00)*(emLen-mhash.digest_size-sLen-2) + bchr(0x01)): - return False - # Step 11 - salt = b("") - if sLen: salt = db[-sLen:] - # Step 12 and 13 - hp = mhash.new(bchr(0x00)*8 + mhash.digest() + salt).digest() - # Step 14 - if h!=hp: - return False - return True - -def new(key, mgfunc=None, saltLen=None): - """Return a signature scheme object `PSS_SigScheme` that - can be used to perform PKCS#1 PSS signature or verification. - - :Parameters: - key : RSA key object - The key to use to sign or verify the message. This is a `Crypto.PublicKey.RSA` object. - Signing is only possible if *key* is a private RSA key. - mgfunc : callable - A mask generation function that accepts two parameters: a string to - use as seed, and the lenth of the mask to generate, in bytes. - If not specified, the standard MGF1 is used. - saltLen : int - Length of the salt, in bytes. If not specified, it matches the output - size of the hash function. - - """ - return PSS_SigScheme(key, mgfunc, saltLen) - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Signature/PKCS1_v1_5.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Signature/PKCS1_v1_5.py deleted file mode 100644 index 73ac251..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Signature/PKCS1_v1_5.py +++ /dev/null @@ -1,236 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Signature/PKCS1-v1_5.py : PKCS#1 v1.5 -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -""" -RSA digital signature protocol according to PKCS#1 v1.5 - -See RFC3447__ or the `original RSA Labs specification`__. 
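Note that the PSS module docstring above signs with ``PKCS1_PSS.sign(key)``, which does not match the ``PSS_SigScheme`` API it goes on to define; a self-contained round trip looks like the sketch below, where a freshly generated, demo-sized key stands in for the ``privkey.der``/``pubkey.der`` files:

    from Crypto.Signature import PKCS1_PSS
    from Crypto.Hash import SHA
    from Crypto.PublicKey import RSA

    key = RSA.generate(1024)                 # demo-sized key; prefer >= 2048 bits in practice
    h = SHA.new(b'To be signed')
    signer = PKCS1_PSS.new(key)              # default MGF1, salt length = digest size
    signature = signer.sign(h)

    verifier = PKCS1_PSS.new(key.publickey())
    assert verifier.verify(SHA.new(b'To be signed'), signature)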
- -This scheme is more properly called ``RSASSA-PKCS1-v1_5``. - -For example, a sender may authenticate a message using SHA-1 like -this: - - >>> from Crypto.Signature import PKCS1_v1_5 - >>> from Crypto.Hash import SHA - >>> from Crypto.PublicKey import RSA - >>> - >>> message = 'To be signed' - >>> key = RSA.importKey(open('privkey.der').read()) - >>> h = SHA.new(message) - >>> signer = PKCS1_v1_5.new(key) - >>> signature = signer.sign(h) - -At the receiver side, verification can be done using the public part of -the RSA key: - - >>> key = RSA.importKey(open('pubkey.der').read()) - >>> h = SHA.new(message) - >>> verifier = PKCS1_v1_5.new(key) - >>> if verifier.verify(h, signature): - >>> print "The signature is authentic." - >>> else: - >>> print "The signature is not authentic." - -:undocumented: __revision__, __package__ - -.. __: http://www.ietf.org/rfc/rfc3447.txt -.. __: http://www.rsa.com/rsalabs/node.asp?id=2125 -""" - -__revision__ = "$Id$" -__all__ = [ 'new', 'PKCS115_SigScheme' ] - -import Crypto.Util.number -from Crypto.Util.number import ceil_div -from Crypto.Util.asn1 import DerSequence, DerNull, DerOctetString -from Crypto.Util.py3compat import * - -class PKCS115_SigScheme: - """This signature scheme can perform PKCS#1 v1.5 RSA signature or verification.""" - - def __init__(self, key): - """Initialize this PKCS#1 v1.5 signature scheme object. - - :Parameters: - key : an RSA key object - If a private half is given, both signature and verification are possible. - If a public half is given, only verification is possible. - """ - self._key = key - - def can_sign(self): - """Return True if this cipher object can be used for signing messages.""" - return self._key.has_private() - - def sign(self, mhash): - """Produce the PKCS#1 v1.5 signature of a message. - - This function is named ``RSASSA-PKCS1-V1_5-SIGN``, and is specified in - section 8.2.1 of RFC3447. - - :Parameters: - mhash : hash object - The hash that was carried out over the message. This is an object - belonging to the `Crypto.Hash` module. - - :Return: The signature encoded as a string. - :Raise ValueError: - If the RSA key length is not sufficiently long to deal with the given - hash algorithm. - :Raise TypeError: - If the RSA key has no private half. - """ - # TODO: Verify the key is RSA - - # See 8.2.1 in RFC3447 - modBits = Crypto.Util.number.size(self._key.n) - k = ceil_div(modBits,8) # Convert from bits to bytes - - # Step 1 - em = EMSA_PKCS1_V1_5_ENCODE(mhash, k) - # Step 2a (OS2IP) and 2b (RSASP1) - m = self._key.decrypt(em) - # Step 2c (I2OSP) - S = bchr(0x00)*(k-len(m)) + m - return S - - def verify(self, mhash, S): - """Verify that a certain PKCS#1 v1.5 signature is authentic. - - This function checks if the party holding the private half of the key - really signed the message. - - This function is named ``RSASSA-PKCS1-V1_5-VERIFY``, and is specified in - section 8.2.2 of RFC3447. - - :Parameters: - mhash : hash object - The hash that was carried out over the message. This is an object - belonging to the `Crypto.Hash` module. - S : string - The signature that needs to be validated. - - :Return: True if verification is correct. False otherwise. - """ - # TODO: Verify the key is RSA - - # See 8.2.2 in RFC3447 - modBits = Crypto.Util.number.size(self._key.n) - k = ceil_div(modBits,8) # Convert from bits to bytes - - # Step 1 - if len(S) != k: - return 0 - # Step 2a (O2SIP) and 2b (RSAVP1) - # Note that signature must be smaller than the module - # but RSA.py won't complain about it. 
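The v1.5 docstring example above already shows the happy path; the sketch below (again with a generated key standing in for the key files) adds the negative case, in which ``verify()`` simply returns a false value for a digest that does not match the signature:

    from Crypto.Signature import PKCS1_v1_5
    from Crypto.Hash import SHA
    from Crypto.PublicKey import RSA

    key = RSA.generate(1024)                          # demo-sized key
    signature = PKCS1_v1_5.new(key).sign(SHA.new(b'To be signed'))

    verifier = PKCS1_v1_5.new(key.publickey())
    assert verifier.verify(SHA.new(b'To be signed'), signature)
    assert not verifier.verify(SHA.new(b'Tampered'), signature)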
- # TODO: Fix RSA object; don't do it here. - m = self._key.encrypt(S, 0)[0] - # Step 2c (I2OSP) - em1 = bchr(0x00)*(k-len(m)) + m - # Step 3 - try: - em2 = EMSA_PKCS1_V1_5_ENCODE(mhash, k) - except ValueError: - return 0 - # Step 4 - # By comparing the full encodings (as opposed to checking each - # of its components one at a time) we avoid attacks to the padding - # scheme like Bleichenbacher's (see http://www.mail-archive.com/cryptography@metzdowd.com/msg06537). - # - return em1==em2 - -def EMSA_PKCS1_V1_5_ENCODE(hash, emLen): - """ - Implement the ``EMSA-PKCS1-V1_5-ENCODE`` function, as defined - in PKCS#1 v2.1 (RFC3447, 9.2). - - ``EMSA-PKCS1-V1_5-ENCODE`` actually accepts the message ``M`` as input, - and hash it internally. Here, we expect that the message has already - been hashed instead. - - :Parameters: - hash : hash object - The hash object that holds the digest of the message being signed. - emLen : int - The length the final encoding must have, in bytes. - - :attention: the early standard (RFC2313) stated that ``DigestInfo`` - had to be BER-encoded. This means that old signatures - might have length tags in indefinite form, which - is not supported in DER. Such encoding cannot be - reproduced by this function. - - :attention: the same standard defined ``DigestAlgorithm`` to be - of ``AlgorithmIdentifier`` type, where the PARAMETERS - item is optional. Encodings for ``MD2/4/5`` without - ``PARAMETERS`` cannot be reproduced by this function. - - :Return: An ``emLen`` byte long string that encodes the hash. - """ - - # First, build the ASN.1 DER object DigestInfo: - # - # DigestInfo ::= SEQUENCE { - # digestAlgorithm AlgorithmIdentifier, - # digest OCTET STRING - # } - # - # where digestAlgorithm identifies the hash function and shall be an - # algorithm ID with an OID in the set PKCS1-v1-5DigestAlgorithms. - # - # PKCS1-v1-5DigestAlgorithms ALGORITHM-IDENTIFIER ::= { - # { OID id-md2 PARAMETERS NULL }| - # { OID id-md5 PARAMETERS NULL }| - # { OID id-sha1 PARAMETERS NULL }| - # { OID id-sha256 PARAMETERS NULL }| - # { OID id-sha384 PARAMETERS NULL }| - # { OID id-sha512 PARAMETERS NULL } - # } - # - digestAlgo = DerSequence([hash.oid, DerNull().encode()]) - digest = DerOctetString(hash.digest()) - digestInfo = DerSequence([ - digestAlgo.encode(), - digest.encode() - ]).encode() - - # We need at least 11 bytes for the remaining data: 3 fixed bytes and - # at least 8 bytes of padding). - if emLen -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== -"""Fast counter functions for CTR cipher modes. - -CTR is a chaining mode for symmetric block encryption or decryption. 
-Messages are divideded into blocks, and the cipher operation takes -place on each block using the secret key and a unique *counter block*. - -The most straightforward way to fulfil the uniqueness property is -to start with an initial, random *counter block* value, and increment it as -the next block is processed. - -The block ciphers from `Crypto.Cipher` (when configured in *MODE_CTR* mode) -invoke a callable object (the *counter* parameter) to get the next *counter block*. -Unfortunately, the Python calling protocol leads to major performance degradations. - -The counter functions instantiated by this module will be invoked directly -by the ciphers in `Crypto.Cipher`. The fact that the Python layer is bypassed -lead to more efficient (and faster) execution of CTR cipher modes. - -An example of usage is the following: - - >>> from Crypto.Cipher import AES - >>> from Crypto.Util import Counter - >>> - >>> pt = b'\x00'*1000000 - >>> ctr = Counter.new(128) - >>> cipher = AES.new(b'\x00'*16, AES.MODE_CTR, counter=ctr) - >>> ct = cipher.encrypt(pt) - -:undocumented: __package__ -""" -import sys -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * -from Crypto.Util.py3compat import * - -from Crypto.Util import _counter -import struct - -# Factory function -def new(nbits, prefix=b(""), suffix=b(""), initial_value=1, overflow=0, little_endian=False, allow_wraparound=False, disable_shortcut=False): - """Create a stateful counter block function suitable for CTR encryption modes. - - Each call to the function returns the next counter block. - Each counter block is made up by three parts:: - - prefix || counter value || postfix - - The counter value is incremented by one at each call. - - :Parameters: - nbits : integer - Length of the desired counter, in bits. It must be a multiple of 8. - prefix : byte string - The constant prefix of the counter block. By default, no prefix is - used. - suffix : byte string - The constant postfix of the counter block. By default, no suffix is - used. - initial_value : integer - The initial value of the counter. Default value is 1. - little_endian : boolean - If True, the counter number will be encoded in little endian format. - If False (default), in big endian format. - allow_wraparound : boolean - If True, the function will raise an *OverflowError* exception as soon - as the counter wraps around. If False (default), the counter will - simply restart from zero. - disable_shortcut : boolean - If True, do not make ciphers from `Crypto.Cipher` bypass the Python - layer when invoking the counter block function. - If False (default), bypass the Python layer. - :Returns: - The counter block function. - """ - - # Sanity-check the message size - (nbytes, remainder) = divmod(nbits, 8) - if remainder != 0: - # In the future, we might support arbitrary bit lengths, but for now we don't. 
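The docstring above already gives the basic AES-CTR usage; as an additional sketch (key and nonce values are placeholders), the ``prefix`` and ``initial_value`` parameters compose the common nonce-plus-counter block layout:

    from Crypto.Cipher import AES
    from Crypto.Util import Counter

    nonce = b'\x01' * 8                     # 64-bit nonce (placeholder value)
    # 64-bit big-endian counter starting at 0; prefix + counter = one 16-byte block.
    ctr = Counter.new(64, prefix=nonce, initial_value=0)
    cipher = AES.new(b'\x00' * 16, AES.MODE_CTR, counter=ctr)
    ct = cipher.encrypt(b'16-byte message!')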
- raise ValueError("nbits must be a multiple of 8; got %d" % (nbits,)) - if nbytes < 1: - raise ValueError("nbits too small") - elif nbytes > 0xffff: - raise ValueError("nbits too large") - - initval = _encode(initial_value, nbytes, little_endian) - - if little_endian: - return _counter._newLE(bstr(prefix), bstr(suffix), initval, allow_wraparound=allow_wraparound, disable_shortcut=disable_shortcut) - else: - return _counter._newBE(bstr(prefix), bstr(suffix), initval, allow_wraparound=allow_wraparound, disable_shortcut=disable_shortcut) - -def _encode(n, nbytes, little_endian=False): - retval = [] - n = int(n) - for i in range(nbytes): - if little_endian: - retval.append(bchr(n & 0xff)) - else: - retval.insert(0, bchr(n & 0xff)) - n >>= 8 - return b("").join(retval) - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/RFC1751.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/RFC1751.py deleted file mode 100644 index 1c10c4a..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/RFC1751.py +++ /dev/null @@ -1,365 +0,0 @@ -# rfc1751.py : Converts between 128-bit strings and a human-readable -# sequence of words, as defined in RFC1751: "A Convention for -# Human-Readable 128-bit Keys", by Daniel L. McDonald. -# -# Part of the Python Cryptography Toolkit -# -# Written by Andrew M. Kuchling and others -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -__revision__ = "$Id$" - - -import binascii -from Crypto.Util.py3compat import * -from functools import reduce - -binary={0:'0000', 1:'0001', 2:'0010', 3:'0011', 4:'0100', 5:'0101', - 6:'0110', 7:'0111', 8:'1000', 9:'1001', 10:'1010', 11:'1011', - 12:'1100', 13:'1101', 14:'1110', 15:'1111'} - -def _key2bin(s): - "Convert a key into a string of binary digits" - kl=[bord(x) for x in s] - kl=[binary[x>>4]+binary[x&15] for x in kl] - return ''.join(kl) - -def _extract(key, start, length): - """Extract a bitstring(2.x)/bytestring(2.x) from a string of binary digits, and return its - numeric value.""" - k=key[start:start+length] - return reduce(lambda x,y: x*2+ord(y)-48, k, 0) - -def key_to_english (key): - """key_to_english(key:string(2.x)/bytes(3.x)) : string - Transform an arbitrary key into a string containing English words. - The key length must be a multiple of 8. 
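The conversion described above can be spot-checked against one of the module's own test vectors (reproduced in the ``__main__`` block further down):

    >>> import binascii
    >>> from Crypto.Util import RFC1751
    >>> RFC1751.key_to_english(binascii.a2b_hex('EB33F77EE73D4053'))
    'TIDE ITCH SLOW REIN RULE MOT'
    >>> RFC1751.english_to_key('TIDE ITCH SLOW REIN RULE MOT') == binascii.a2b_hex('EB33F77EE73D4053')
    True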
- """ - english='' - for index in range(0, len(key), 8): # Loop over 8-byte subkeys - subkey=key[index:index+8] - # Compute the parity of the key - skbin=_key2bin(subkey) ; p=0 - for i in range(0, 64, 2): p=p+_extract(skbin, i, 2) - # Append parity bits to the subkey - skbin=_key2bin(subkey+bchr((p<<6) & 255)) - for i in range(0, 64, 11): - english=english+wordlist[_extract(skbin, i, 11)]+' ' - - return english[:-1] # Remove the trailing space - -def english_to_key (s): - """english_to_key(string):string(2.x)/bytes(2.x) - Transform a string into a corresponding key. - The string must contain words separated by whitespace; the number - of words must be a multiple of 6. - """ - - L=s.upper().split() ; key=b('') - for index in range(0, len(L), 6): - sublist=L[index:index+6] ; char=9*[0] ; bits=0 - for i in sublist: - index = wordlist.index(i) - shift = (8-(bits+11)%8) %8 - y = index << shift - cl, cc, cr = (y>>16), (y>>8)&0xff, y & 0xff - if (shift>5): - char[bits>>3] = char[bits>>3] | cl - char[(bits>>3)+1] = char[(bits>>3)+1] | cc - char[(bits>>3)+2] = char[(bits>>3)+2] | cr - elif shift>-3: - char[bits>>3] = char[bits>>3] | cc - char[(bits>>3)+1] = char[(bits>>3)+1] | cr - else: char[bits>>3] = char[bits>>3] | cr - bits=bits+11 - subkey=reduce(lambda x,y:x+bchr(y), char, b('')) - - # Check the parity of the resulting key - skbin=_key2bin(subkey) - p=0 - for i in range(0, 64, 2): p=p+_extract(skbin, i, 2) - if (p&3) != _extract(skbin, 64, 2): - raise ValueError("Parity error in resulting key") - key=key+subkey[0:8] - return key - -wordlist=[ "A", "ABE", "ACE", "ACT", "AD", "ADA", "ADD", - "AGO", "AID", "AIM", "AIR", "ALL", "ALP", "AM", "AMY", "AN", "ANA", - "AND", "ANN", "ANT", "ANY", "APE", "APS", "APT", "ARC", "ARE", "ARK", - "ARM", "ART", "AS", "ASH", "ASK", "AT", "ATE", "AUG", "AUK", "AVE", - "AWE", "AWK", "AWL", "AWN", "AX", "AYE", "BAD", "BAG", "BAH", "BAM", - "BAN", "BAR", "BAT", "BAY", "BE", "BED", "BEE", "BEG", "BEN", "BET", - "BEY", "BIB", "BID", "BIG", "BIN", "BIT", "BOB", "BOG", "BON", "BOO", - "BOP", "BOW", "BOY", "BUB", "BUD", "BUG", "BUM", "BUN", "BUS", "BUT", - "BUY", "BY", "BYE", "CAB", "CAL", "CAM", "CAN", "CAP", "CAR", "CAT", - "CAW", "COD", "COG", "COL", "CON", "COO", "COP", "COT", "COW", "COY", - "CRY", "CUB", "CUE", "CUP", "CUR", "CUT", "DAB", "DAD", "DAM", "DAN", - "DAR", "DAY", "DEE", "DEL", "DEN", "DES", "DEW", "DID", "DIE", "DIG", - "DIN", "DIP", "DO", "DOE", "DOG", "DON", "DOT", "DOW", "DRY", "DUB", - "DUD", "DUE", "DUG", "DUN", "EAR", "EAT", "ED", "EEL", "EGG", "EGO", - "ELI", "ELK", "ELM", "ELY", "EM", "END", "EST", "ETC", "EVA", "EVE", - "EWE", "EYE", "FAD", "FAN", "FAR", "FAT", "FAY", "FED", "FEE", "FEW", - "FIB", "FIG", "FIN", "FIR", "FIT", "FLO", "FLY", "FOE", "FOG", "FOR", - "FRY", "FUM", "FUN", "FUR", "GAB", "GAD", "GAG", "GAL", "GAM", "GAP", - "GAS", "GAY", "GEE", "GEL", "GEM", "GET", "GIG", "GIL", "GIN", "GO", - "GOT", "GUM", "GUN", "GUS", "GUT", "GUY", "GYM", "GYP", "HA", "HAD", - "HAL", "HAM", "HAN", "HAP", "HAS", "HAT", "HAW", "HAY", "HE", "HEM", - "HEN", "HER", "HEW", "HEY", "HI", "HID", "HIM", "HIP", "HIS", "HIT", - "HO", "HOB", "HOC", "HOE", "HOG", "HOP", "HOT", "HOW", "HUB", "HUE", - "HUG", "HUH", "HUM", "HUT", "I", "ICY", "IDA", "IF", "IKE", "ILL", - "INK", "INN", "IO", "ION", "IQ", "IRA", "IRE", "IRK", "IS", "IT", - "ITS", "IVY", "JAB", "JAG", "JAM", "JAN", "JAR", "JAW", "JAY", "JET", - "JIG", "JIM", "JO", "JOB", "JOE", "JOG", "JOT", "JOY", "JUG", "JUT", - "KAY", "KEG", "KEN", "KEY", "KID", "KIM", "KIN", "KIT", "LA", "LAB", - "LAC", "LAD", 
"LAG", "LAM", "LAP", "LAW", "LAY", "LEA", "LED", "LEE", - "LEG", "LEN", "LEO", "LET", "LEW", "LID", "LIE", "LIN", "LIP", "LIT", - "LO", "LOB", "LOG", "LOP", "LOS", "LOT", "LOU", "LOW", "LOY", "LUG", - "LYE", "MA", "MAC", "MAD", "MAE", "MAN", "MAO", "MAP", "MAT", "MAW", - "MAY", "ME", "MEG", "MEL", "MEN", "MET", "MEW", "MID", "MIN", "MIT", - "MOB", "MOD", "MOE", "MOO", "MOP", "MOS", "MOT", "MOW", "MUD", "MUG", - "MUM", "MY", "NAB", "NAG", "NAN", "NAP", "NAT", "NAY", "NE", "NED", - "NEE", "NET", "NEW", "NIB", "NIL", "NIP", "NIT", "NO", "NOB", "NOD", - "NON", "NOR", "NOT", "NOV", "NOW", "NU", "NUN", "NUT", "O", "OAF", - "OAK", "OAR", "OAT", "ODD", "ODE", "OF", "OFF", "OFT", "OH", "OIL", - "OK", "OLD", "ON", "ONE", "OR", "ORB", "ORE", "ORR", "OS", "OTT", - "OUR", "OUT", "OVA", "OW", "OWE", "OWL", "OWN", "OX", "PA", "PAD", - "PAL", "PAM", "PAN", "PAP", "PAR", "PAT", "PAW", "PAY", "PEA", "PEG", - "PEN", "PEP", "PER", "PET", "PEW", "PHI", "PI", "PIE", "PIN", "PIT", - "PLY", "PO", "POD", "POE", "POP", "POT", "POW", "PRO", "PRY", "PUB", - "PUG", "PUN", "PUP", "PUT", "QUO", "RAG", "RAM", "RAN", "RAP", "RAT", - "RAW", "RAY", "REB", "RED", "REP", "RET", "RIB", "RID", "RIG", "RIM", - "RIO", "RIP", "ROB", "ROD", "ROE", "RON", "ROT", "ROW", "ROY", "RUB", - "RUE", "RUG", "RUM", "RUN", "RYE", "SAC", "SAD", "SAG", "SAL", "SAM", - "SAN", "SAP", "SAT", "SAW", "SAY", "SEA", "SEC", "SEE", "SEN", "SET", - "SEW", "SHE", "SHY", "SIN", "SIP", "SIR", "SIS", "SIT", "SKI", "SKY", - "SLY", "SO", "SOB", "SOD", "SON", "SOP", "SOW", "SOY", "SPA", "SPY", - "SUB", "SUD", "SUE", "SUM", "SUN", "SUP", "TAB", "TAD", "TAG", "TAN", - "TAP", "TAR", "TEA", "TED", "TEE", "TEN", "THE", "THY", "TIC", "TIE", - "TIM", "TIN", "TIP", "TO", "TOE", "TOG", "TOM", "TON", "TOO", "TOP", - "TOW", "TOY", "TRY", "TUB", "TUG", "TUM", "TUN", "TWO", "UN", "UP", - "US", "USE", "VAN", "VAT", "VET", "VIE", "WAD", "WAG", "WAR", "WAS", - "WAY", "WE", "WEB", "WED", "WEE", "WET", "WHO", "WHY", "WIN", "WIT", - "WOK", "WON", "WOO", "WOW", "WRY", "WU", "YAM", "YAP", "YAW", "YE", - "YEA", "YES", "YET", "YOU", "ABED", "ABEL", "ABET", "ABLE", "ABUT", - "ACHE", "ACID", "ACME", "ACRE", "ACTA", "ACTS", "ADAM", "ADDS", - "ADEN", "AFAR", "AFRO", "AGEE", "AHEM", "AHOY", "AIDA", "AIDE", - "AIDS", "AIRY", "AJAR", "AKIN", "ALAN", "ALEC", "ALGA", "ALIA", - "ALLY", "ALMA", "ALOE", "ALSO", "ALTO", "ALUM", "ALVA", "AMEN", - "AMES", "AMID", "AMMO", "AMOK", "AMOS", "AMRA", "ANDY", "ANEW", - "ANNA", "ANNE", "ANTE", "ANTI", "AQUA", "ARAB", "ARCH", "AREA", - "ARGO", "ARID", "ARMY", "ARTS", "ARTY", "ASIA", "ASKS", "ATOM", - "AUNT", "AURA", "AUTO", "AVER", "AVID", "AVIS", "AVON", "AVOW", - "AWAY", "AWRY", "BABE", "BABY", "BACH", "BACK", "BADE", "BAIL", - "BAIT", "BAKE", "BALD", "BALE", "BALI", "BALK", "BALL", "BALM", - "BAND", "BANE", "BANG", "BANK", "BARB", "BARD", "BARE", "BARK", - "BARN", "BARR", "BASE", "BASH", "BASK", "BASS", "BATE", "BATH", - "BAWD", "BAWL", "BEAD", "BEAK", "BEAM", "BEAN", "BEAR", "BEAT", - "BEAU", "BECK", "BEEF", "BEEN", "BEER", - "BEET", "BELA", "BELL", "BELT", "BEND", "BENT", "BERG", "BERN", - "BERT", "BESS", "BEST", "BETA", "BETH", "BHOY", "BIAS", "BIDE", - "BIEN", "BILE", "BILK", "BILL", "BIND", "BING", "BIRD", "BITE", - "BITS", "BLAB", "BLAT", "BLED", "BLEW", "BLOB", "BLOC", "BLOT", - "BLOW", "BLUE", "BLUM", "BLUR", "BOAR", "BOAT", "BOCA", "BOCK", - "BODE", "BODY", "BOGY", "BOHR", "BOIL", "BOLD", "BOLO", "BOLT", - "BOMB", "BONA", "BOND", "BONE", "BONG", "BONN", "BONY", "BOOK", - "BOOM", "BOON", "BOOT", "BORE", "BORG", "BORN", "BOSE", "BOSS", - "BOTH", 
"BOUT", "BOWL", "BOYD", "BRAD", "BRAE", "BRAG", "BRAN", - "BRAY", "BRED", "BREW", "BRIG", "BRIM", "BROW", "BUCK", "BUDD", - "BUFF", "BULB", "BULK", "BULL", "BUNK", "BUNT", "BUOY", "BURG", - "BURL", "BURN", "BURR", "BURT", "BURY", "BUSH", "BUSS", "BUST", - "BUSY", "BYTE", "CADY", "CAFE", "CAGE", "CAIN", "CAKE", "CALF", - "CALL", "CALM", "CAME", "CANE", "CANT", "CARD", "CARE", "CARL", - "CARR", "CART", "CASE", "CASH", "CASK", "CAST", "CAVE", "CEIL", - "CELL", "CENT", "CERN", "CHAD", "CHAR", "CHAT", "CHAW", "CHEF", - "CHEN", "CHEW", "CHIC", "CHIN", "CHOU", "CHOW", "CHUB", "CHUG", - "CHUM", "CITE", "CITY", "CLAD", "CLAM", "CLAN", "CLAW", "CLAY", - "CLOD", "CLOG", "CLOT", "CLUB", "CLUE", "COAL", "COAT", "COCA", - "COCK", "COCO", "CODA", "CODE", "CODY", "COED", "COIL", "COIN", - "COKE", "COLA", "COLD", "COLT", "COMA", "COMB", "COME", "COOK", - "COOL", "COON", "COOT", "CORD", "CORE", "CORK", "CORN", "COST", - "COVE", "COWL", "CRAB", "CRAG", "CRAM", "CRAY", "CREW", "CRIB", - "CROW", "CRUD", "CUBA", "CUBE", "CUFF", "CULL", "CULT", "CUNY", - "CURB", "CURD", "CURE", "CURL", "CURT", "CUTS", "DADE", "DALE", - "DAME", "DANA", "DANE", "DANG", "DANK", "DARE", "DARK", "DARN", - "DART", "DASH", "DATA", "DATE", "DAVE", "DAVY", "DAWN", "DAYS", - "DEAD", "DEAF", "DEAL", "DEAN", "DEAR", "DEBT", "DECK", "DEED", - "DEEM", "DEER", "DEFT", "DEFY", "DELL", "DENT", "DENY", "DESK", - "DIAL", "DICE", "DIED", "DIET", "DIME", "DINE", "DING", "DINT", - "DIRE", "DIRT", "DISC", "DISH", "DISK", "DIVE", "DOCK", "DOES", - "DOLE", "DOLL", "DOLT", "DOME", "DONE", "DOOM", "DOOR", "DORA", - "DOSE", "DOTE", "DOUG", "DOUR", "DOVE", "DOWN", "DRAB", "DRAG", - "DRAM", "DRAW", "DREW", "DRUB", "DRUG", "DRUM", "DUAL", "DUCK", - "DUCT", "DUEL", "DUET", "DUKE", "DULL", "DUMB", "DUNE", "DUNK", - "DUSK", "DUST", "DUTY", "EACH", "EARL", "EARN", "EASE", "EAST", - "EASY", "EBEN", "ECHO", "EDDY", "EDEN", "EDGE", "EDGY", "EDIT", - "EDNA", "EGAN", "ELAN", "ELBA", "ELLA", "ELSE", "EMIL", "EMIT", - "EMMA", "ENDS", "ERIC", "EROS", "EVEN", "EVER", "EVIL", "EYED", - "FACE", "FACT", "FADE", "FAIL", "FAIN", "FAIR", "FAKE", "FALL", - "FAME", "FANG", "FARM", "FAST", "FATE", "FAWN", "FEAR", "FEAT", - "FEED", "FEEL", "FEET", "FELL", "FELT", "FEND", "FERN", "FEST", - "FEUD", "FIEF", "FIGS", "FILE", "FILL", "FILM", "FIND", "FINE", - "FINK", "FIRE", "FIRM", "FISH", "FISK", "FIST", "FITS", "FIVE", - "FLAG", "FLAK", "FLAM", "FLAT", "FLAW", "FLEA", "FLED", "FLEW", - "FLIT", "FLOC", "FLOG", "FLOW", "FLUB", "FLUE", "FOAL", "FOAM", - "FOGY", "FOIL", "FOLD", "FOLK", "FOND", "FONT", "FOOD", "FOOL", - "FOOT", "FORD", "FORE", "FORK", "FORM", "FORT", "FOSS", "FOUL", - "FOUR", "FOWL", "FRAU", "FRAY", "FRED", "FREE", "FRET", "FREY", - "FROG", "FROM", "FUEL", "FULL", "FUME", "FUND", "FUNK", "FURY", - "FUSE", "FUSS", "GAFF", "GAGE", "GAIL", "GAIN", "GAIT", "GALA", - "GALE", "GALL", "GALT", "GAME", "GANG", "GARB", "GARY", "GASH", - "GATE", "GAUL", "GAUR", "GAVE", "GAWK", "GEAR", "GELD", "GENE", - "GENT", "GERM", "GETS", "GIBE", "GIFT", "GILD", "GILL", "GILT", - "GINA", "GIRD", "GIRL", "GIST", "GIVE", "GLAD", "GLEE", "GLEN", - "GLIB", "GLOB", "GLOM", "GLOW", "GLUE", "GLUM", "GLUT", "GOAD", - "GOAL", "GOAT", "GOER", "GOES", "GOLD", "GOLF", "GONE", "GONG", - "GOOD", "GOOF", "GORE", "GORY", "GOSH", "GOUT", "GOWN", "GRAB", - "GRAD", "GRAY", "GREG", "GREW", "GREY", "GRID", "GRIM", "GRIN", - "GRIT", "GROW", "GRUB", "GULF", "GULL", "GUNK", "GURU", "GUSH", - "GUST", "GWEN", "GWYN", "HAAG", "HAAS", "HACK", "HAIL", "HAIR", - "HALE", "HALF", "HALL", "HALO", "HALT", "HAND", "HANG", "HANK", 
- "HANS", "HARD", "HARK", "HARM", "HART", "HASH", "HAST", "HATE", - "HATH", "HAUL", "HAVE", "HAWK", "HAYS", "HEAD", "HEAL", "HEAR", - "HEAT", "HEBE", "HECK", "HEED", "HEEL", "HEFT", "HELD", "HELL", - "HELM", "HERB", "HERD", "HERE", "HERO", "HERS", "HESS", "HEWN", - "HICK", "HIDE", "HIGH", "HIKE", "HILL", "HILT", "HIND", "HINT", - "HIRE", "HISS", "HIVE", "HOBO", "HOCK", "HOFF", "HOLD", "HOLE", - "HOLM", "HOLT", "HOME", "HONE", "HONK", "HOOD", "HOOF", "HOOK", - "HOOT", "HORN", "HOSE", "HOST", "HOUR", "HOVE", "HOWE", "HOWL", - "HOYT", "HUCK", "HUED", "HUFF", "HUGE", "HUGH", "HUGO", "HULK", - "HULL", "HUNK", "HUNT", "HURD", "HURL", "HURT", "HUSH", "HYDE", - "HYMN", "IBIS", "ICON", "IDEA", "IDLE", "IFFY", "INCA", "INCH", - "INTO", "IONS", "IOTA", "IOWA", "IRIS", "IRMA", "IRON", "ISLE", - "ITCH", "ITEM", "IVAN", "JACK", "JADE", "JAIL", "JAKE", "JANE", - "JAVA", "JEAN", "JEFF", "JERK", "JESS", "JEST", "JIBE", "JILL", - "JILT", "JIVE", "JOAN", "JOBS", "JOCK", "JOEL", "JOEY", "JOHN", - "JOIN", "JOKE", "JOLT", "JOVE", "JUDD", "JUDE", "JUDO", "JUDY", - "JUJU", "JUKE", "JULY", "JUNE", "JUNK", "JUNO", "JURY", "JUST", - "JUTE", "KAHN", "KALE", "KANE", "KANT", "KARL", "KATE", "KEEL", - "KEEN", "KENO", "KENT", "KERN", "KERR", "KEYS", "KICK", "KILL", - "KIND", "KING", "KIRK", "KISS", "KITE", "KLAN", "KNEE", "KNEW", - "KNIT", "KNOB", "KNOT", "KNOW", "KOCH", "KONG", "KUDO", "KURD", - "KURT", "KYLE", "LACE", "LACK", "LACY", "LADY", "LAID", "LAIN", - "LAIR", "LAKE", "LAMB", "LAME", "LAND", "LANE", "LANG", "LARD", - "LARK", "LASS", "LAST", "LATE", "LAUD", "LAVA", "LAWN", "LAWS", - "LAYS", "LEAD", "LEAF", "LEAK", "LEAN", "LEAR", "LEEK", "LEER", - "LEFT", "LEND", "LENS", "LENT", "LEON", "LESK", "LESS", "LEST", - "LETS", "LIAR", "LICE", "LICK", "LIED", "LIEN", "LIES", "LIEU", - "LIFE", "LIFT", "LIKE", "LILA", "LILT", "LILY", "LIMA", "LIMB", - "LIME", "LIND", "LINE", "LINK", "LINT", "LION", "LISA", "LIST", - "LIVE", "LOAD", "LOAF", "LOAM", "LOAN", "LOCK", "LOFT", "LOGE", - "LOIS", "LOLA", "LONE", "LONG", "LOOK", "LOON", "LOOT", "LORD", - "LORE", "LOSE", "LOSS", "LOST", "LOUD", "LOVE", "LOWE", "LUCK", - "LUCY", "LUGE", "LUKE", "LULU", "LUND", "LUNG", "LURA", "LURE", - "LURK", "LUSH", "LUST", "LYLE", "LYNN", "LYON", "LYRA", "MACE", - "MADE", "MAGI", "MAID", "MAIL", "MAIN", "MAKE", "MALE", "MALI", - "MALL", "MALT", "MANA", "MANN", "MANY", "MARC", "MARE", "MARK", - "MARS", "MART", "MARY", "MASH", "MASK", "MASS", "MAST", "MATE", - "MATH", "MAUL", "MAYO", "MEAD", "MEAL", "MEAN", "MEAT", "MEEK", - "MEET", "MELD", "MELT", "MEMO", "MEND", "MENU", "MERT", "MESH", - "MESS", "MICE", "MIKE", "MILD", "MILE", "MILK", "MILL", "MILT", - "MIMI", "MIND", "MINE", "MINI", "MINK", "MINT", "MIRE", "MISS", - "MIST", "MITE", "MITT", "MOAN", "MOAT", "MOCK", "MODE", "MOLD", - "MOLE", "MOLL", "MOLT", "MONA", "MONK", "MONT", "MOOD", "MOON", - "MOOR", "MOOT", "MORE", "MORN", "MORT", "MOSS", "MOST", "MOTH", - "MOVE", "MUCH", "MUCK", "MUDD", "MUFF", "MULE", "MULL", "MURK", - "MUSH", "MUST", "MUTE", "MUTT", "MYRA", "MYTH", "NAGY", "NAIL", - "NAIR", "NAME", "NARY", "NASH", "NAVE", "NAVY", "NEAL", "NEAR", - "NEAT", "NECK", "NEED", "NEIL", "NELL", "NEON", "NERO", "NESS", - "NEST", "NEWS", "NEWT", "NIBS", "NICE", "NICK", "NILE", "NINA", - "NINE", "NOAH", "NODE", "NOEL", "NOLL", "NONE", "NOOK", "NOON", - "NORM", "NOSE", "NOTE", "NOUN", "NOVA", "NUDE", "NULL", "NUMB", - "OATH", "OBEY", "OBOE", "ODIN", "OHIO", "OILY", "OINT", "OKAY", - "OLAF", "OLDY", "OLGA", "OLIN", "OMAN", "OMEN", "OMIT", "ONCE", - "ONES", "ONLY", "ONTO", "ONUS", "ORAL", "ORGY", 
"OSLO", "OTIS", - "OTTO", "OUCH", "OUST", "OUTS", "OVAL", "OVEN", "OVER", "OWLY", - "OWNS", "QUAD", "QUIT", "QUOD", "RACE", "RACK", "RACY", "RAFT", - "RAGE", "RAID", "RAIL", "RAIN", "RAKE", "RANK", "RANT", "RARE", - "RASH", "RATE", "RAVE", "RAYS", "READ", "REAL", "REAM", "REAR", - "RECK", "REED", "REEF", "REEK", "REEL", "REID", "REIN", "RENA", - "REND", "RENT", "REST", "RICE", "RICH", "RICK", "RIDE", "RIFT", - "RILL", "RIME", "RING", "RINK", "RISE", "RISK", "RITE", "ROAD", - "ROAM", "ROAR", "ROBE", "ROCK", "RODE", "ROIL", "ROLL", "ROME", - "ROOD", "ROOF", "ROOK", "ROOM", "ROOT", "ROSA", "ROSE", "ROSS", - "ROSY", "ROTH", "ROUT", "ROVE", "ROWE", "ROWS", "RUBE", "RUBY", - "RUDE", "RUDY", "RUIN", "RULE", "RUNG", "RUNS", "RUNT", "RUSE", - "RUSH", "RUSK", "RUSS", "RUST", "RUTH", "SACK", "SAFE", "SAGE", - "SAID", "SAIL", "SALE", "SALK", "SALT", "SAME", "SAND", "SANE", - "SANG", "SANK", "SARA", "SAUL", "SAVE", "SAYS", "SCAN", "SCAR", - "SCAT", "SCOT", "SEAL", "SEAM", "SEAR", "SEAT", "SEED", "SEEK", - "SEEM", "SEEN", "SEES", "SELF", "SELL", "SEND", "SENT", "SETS", - "SEWN", "SHAG", "SHAM", "SHAW", "SHAY", "SHED", "SHIM", "SHIN", - "SHOD", "SHOE", "SHOT", "SHOW", "SHUN", "SHUT", "SICK", "SIDE", - "SIFT", "SIGH", "SIGN", "SILK", "SILL", "SILO", "SILT", "SINE", - "SING", "SINK", "SIRE", "SITE", "SITS", "SITU", "SKAT", "SKEW", - "SKID", "SKIM", "SKIN", "SKIT", "SLAB", "SLAM", "SLAT", "SLAY", - "SLED", "SLEW", "SLID", "SLIM", "SLIT", "SLOB", "SLOG", "SLOT", - "SLOW", "SLUG", "SLUM", "SLUR", "SMOG", "SMUG", "SNAG", "SNOB", - "SNOW", "SNUB", "SNUG", "SOAK", "SOAR", "SOCK", "SODA", "SOFA", - "SOFT", "SOIL", "SOLD", "SOME", "SONG", "SOON", "SOOT", "SORE", - "SORT", "SOUL", "SOUR", "SOWN", "STAB", "STAG", "STAN", "STAR", - "STAY", "STEM", "STEW", "STIR", "STOW", "STUB", "STUN", "SUCH", - "SUDS", "SUIT", "SULK", "SUMS", "SUNG", "SUNK", "SURE", "SURF", - "SWAB", "SWAG", "SWAM", "SWAN", "SWAT", "SWAY", "SWIM", "SWUM", - "TACK", "TACT", "TAIL", "TAKE", "TALE", "TALK", "TALL", "TANK", - "TASK", "TATE", "TAUT", "TEAL", "TEAM", "TEAR", "TECH", "TEEM", - "TEEN", "TEET", "TELL", "TEND", "TENT", "TERM", "TERN", "TESS", - "TEST", "THAN", "THAT", "THEE", "THEM", "THEN", "THEY", "THIN", - "THIS", "THUD", "THUG", "TICK", "TIDE", "TIDY", "TIED", "TIER", - "TILE", "TILL", "TILT", "TIME", "TINA", "TINE", "TINT", "TINY", - "TIRE", "TOAD", "TOGO", "TOIL", "TOLD", "TOLL", "TONE", "TONG", - "TONY", "TOOK", "TOOL", "TOOT", "TORE", "TORN", "TOTE", "TOUR", - "TOUT", "TOWN", "TRAG", "TRAM", "TRAY", "TREE", "TREK", "TRIG", - "TRIM", "TRIO", "TROD", "TROT", "TROY", "TRUE", "TUBA", "TUBE", - "TUCK", "TUFT", "TUNA", "TUNE", "TUNG", "TURF", "TURN", "TUSK", - "TWIG", "TWIN", "TWIT", "ULAN", "UNIT", "URGE", "USED", "USER", - "USES", "UTAH", "VAIL", "VAIN", "VALE", "VARY", "VASE", "VAST", - "VEAL", "VEDA", "VEIL", "VEIN", "VEND", "VENT", "VERB", "VERY", - "VETO", "VICE", "VIEW", "VINE", "VISE", "VOID", "VOLT", "VOTE", - "WACK", "WADE", "WAGE", "WAIL", "WAIT", "WAKE", "WALE", "WALK", - "WALL", "WALT", "WAND", "WANE", "WANG", "WANT", "WARD", "WARM", - "WARN", "WART", "WASH", "WAST", "WATS", "WATT", "WAVE", "WAVY", - "WAYS", "WEAK", "WEAL", "WEAN", "WEAR", "WEED", "WEEK", "WEIR", - "WELD", "WELL", "WELT", "WENT", "WERE", "WERT", "WEST", "WHAM", - "WHAT", "WHEE", "WHEN", "WHET", "WHOA", "WHOM", "WICK", "WIFE", - "WILD", "WILL", "WIND", "WINE", "WING", "WINK", "WINO", "WIRE", - "WISE", "WISH", "WITH", "WOLF", "WONT", "WOOD", "WOOL", "WORD", - "WORE", "WORK", "WORM", "WORN", "WOVE", "WRIT", "WYNN", "YALE", - "YANG", "YANK", "YARD", "YARN", 
"YAWL", "YAWN", "YEAH", "YEAR", - "YELL", "YOGA", "YOKE" ] - -if __name__=='__main__': - data = [('EB33F77EE73D4053', 'TIDE ITCH SLOW REIN RULE MOT'), - ('CCAC2AED591056BE4F90FD441C534766', - 'RASH BUSH MILK LOOK BAD BRIM AVID GAFF BAIT ROT POD LOVE'), - ('EFF81F9BFBC65350920CDD7416DE8009', - 'TROD MUTE TAIL WARM CHAR KONG HAAG CITY BORE O TEAL AWL') - ] - - for key, words in data: - print('Trying key', key) - key=binascii.a2b_hex(key) - w2=key_to_english(key) - if w2!=words: - print('key_to_english fails on key', repr(key), ', producing', str(w2)) - k2=english_to_key(words) - if k2!=key: - print('english_to_key fails on key', repr(key), ', producing', repr(k2)) - - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/__init__.py deleted file mode 100644 index a3bef8a..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/__init__.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Miscellaneous modules - -Contains useful modules that don't belong into any of the -other Crypto.* subpackages. - -Crypto.Util.number Number-theoretic functions (primality testing, etc.) -Crypto.Util.randpool Random number generation -Crypto.Util.RFC1751 Converts between 128-bit keys and human-readable - strings of words. -Crypto.Util.asn1 Minimal support for ASN.1 DER encoding - -""" - -__all__ = ['randpool', 'RFC1751', 'number', 'strxor', 'asn1' ] - -__revision__ = "$Id$" - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/_counter.cpython-37m-x86_64-linux-gnu.so b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/_counter.cpython-37m-x86_64-linux-gnu.so deleted file mode 100755 index 2f06398..0000000 Binary files a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/_counter.cpython-37m-x86_64-linux-gnu.so and /dev/null differ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/_number_new.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/_number_new.py deleted file mode 100644 index 5f29176..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/_number_new.py +++ /dev/null @@ -1,119 +0,0 @@ -# -*- coding: ascii -*- -# -# Util/_number_new.py : utility functions -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. 
To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -## NOTE: Do not import this module directly. Import these functions from Crypto.Util.number. - -__revision__ = "$Id$" -__all__ = ['ceil_shift', 'ceil_div', 'floor_div', 'exact_log2', 'exact_div'] - -import sys -if sys.version_info[0] == 2 and sys.version_info[1] == 1: - from Crypto.Util.py21compat import * - -def ceil_shift(n, b): - """Return ceil(n / 2**b) without performing any floating-point or division operations. - - This is done by right-shifting n by b bits and incrementing the result by 1 - if any '1' bits were shifted out. - """ - if not isinstance(n, int) or not isinstance(b, int): - raise TypeError("unsupported operand type(s): %r and %r" % (type(n).__name__, type(b).__name__)) - - assert n >= 0 and b >= 0 # I haven't tested or even thought about negative values - mask = (1 << b) - 1 - if n & mask: - return (n >> b) + 1 - else: - return n >> b - -def ceil_div(a, b): - """Return ceil(a / b) without performing any floating-point operations.""" - - if not isinstance(a, int) or not isinstance(b, int): - raise TypeError("unsupported operand type(s): %r and %r" % (type(a).__name__, type(b).__name__)) - - (q, r) = divmod(a, b) - if r: - return q + 1 - else: - return q - -def floor_div(a, b): - if not isinstance(a, int) or not isinstance(b, int): - raise TypeError("unsupported operand type(s): %r and %r" % (type(a).__name__, type(b).__name__)) - - (q, r) = divmod(a, b) - return q - -def exact_log2(num): - """Find and return an integer i >= 0 such that num == 2**i. - - If no such integer exists, this function raises ValueError. - """ - - if not isinstance(num, int): - raise TypeError("unsupported operand type: %r" % (type(num).__name__,)) - - n = int(num) - if n <= 0: - raise ValueError("cannot compute logarithm of non-positive number") - - i = 0 - while n != 0: - if (n & 1) and n != 1: - raise ValueError("No solution could be found") - i += 1 - n >>= 1 - i -= 1 - - assert num == (1 << i) - return i - -def exact_div(p, d, allow_divzero=False): - """Find and return an integer n such that p == n * d - - If no such integer exists, this function raises ValueError. - - Both operands must be integers. - - If the second operand is zero, this function will raise ZeroDivisionError - unless allow_divzero is true (default: False). 
- """ - - if not isinstance(p, int) or not isinstance(d, int): - raise TypeError("unsupported operand type(s): %r and %r" % (type(p).__name__, type(d).__name__)) - - if d == 0 and allow_divzero: - n = 0 - if p != n * d: - raise ValueError("No solution could be found") - else: - (n, r) = divmod(p, d) - if r != 0: - raise ValueError("No solution could be found") - - assert p == n * d - return n - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/asn1.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/asn1.py deleted file mode 100644 index 9a97d97..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/asn1.py +++ /dev/null @@ -1,286 +0,0 @@ -# -*- coding: ascii -*- -# -# Util/asn1.py : Minimal support for ASN.1 DER binary encoding. -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -from Crypto.Util.number import long_to_bytes, bytes_to_long -import sys -from Crypto.Util.py3compat import * - -__all__ = [ 'DerObject', 'DerInteger', 'DerOctetString', 'DerNull', 'DerSequence', 'DerObjectId' ] - -class DerObject: - """Base class for defining a single DER object. - - Instantiate this class ONLY when you have to decode a DER element. - """ - - # Known TAG types - typeTags = { 'SEQUENCE': 0x30, 'BIT STRING': 0x03, 'INTEGER': 0x02, - 'OCTET STRING': 0x04, 'NULL': 0x05, 'OBJECT IDENTIFIER': 0x06 } - - def __init__(self, ASN1Type=None, payload=b('')): - """Initialize the DER object according to a specific type. - - The ASN.1 type is either specified as the ASN.1 string (e.g. - 'SEQUENCE'), directly with its numerical tag or with no tag - at all (None).""" - if isInt(ASN1Type) or ASN1Type is None: - self.typeTag = ASN1Type - else: - if len(ASN1Type)==1: - self.typeTag = ord(ASN1Type) - else: - self.typeTag = self.typeTags.get(ASN1Type) - self.payload = payload - - def isType(self, ASN1Type): - return self.typeTags[ASN1Type]==self.typeTag - - def _lengthOctets(self, payloadLen): - """Return a byte string that encodes the given payload length (in - bytes) in a format suitable for a DER length tag (L). 
- """ - if payloadLen>127: - encoding = long_to_bytes(payloadLen) - return bchr(len(encoding)+128) + encoding - return bchr(payloadLen) - - def encode(self): - """Return a complete DER element, fully encoded as a TLV.""" - return bchr(self.typeTag) + self._lengthOctets(len(self.payload)) + self.payload - - def _decodeLen(self, idx, der): - """Given a (part of a) DER element, and an index to the first byte of - a DER length tag (L), return a tuple with the payload size, - and the index of the first byte of the such payload (V). - - Raises a ValueError exception if the DER length is invalid. - Raises an IndexError exception if the DER element is too short. - """ - length = bord(der[idx]) - if length<=127: - return (length,idx+1) - payloadLength = bytes_to_long(der[idx+1:idx+1+(length & 0x7F)]) - if payloadLength<=127: - raise ValueError("Not a DER length tag.") - return (payloadLength, idx+1+(length & 0x7F)) - - def decode(self, derEle, noLeftOvers=0): - """Decode a complete DER element, and re-initializes this - object with it. - - @param derEle A complete DER element. It must start with a DER T - tag. - @param noLeftOvers Indicate whether it is acceptable to complete the - parsing of the DER element and find that not all - bytes in derEle have been used. - @return Index of the first unused byte in the given DER element. - - Raises a ValueError exception in case of parsing errors. - Raises an IndexError exception if the DER element is too short. - """ - try: - self.typeTag = bord(derEle[0]) - if (self.typeTag & 0x1F)==0x1F: - raise ValueError("Unsupported DER tag") - (length,idx) = self._decodeLen(1, derEle) - if noLeftOvers and len(derEle) != (idx+length): - raise ValueError("Not a DER structure") - self.payload = derEle[idx:idx+length] - except IndexError: - raise ValueError("Not a valid DER SEQUENCE.") - return idx+length - -class DerInteger(DerObject): - def __init__(self, value = 0): - """Class to model an INTEGER DER element. - - Limitation: only non-negative values are supported. - """ - DerObject.__init__(self, 'INTEGER') - self.value = value - - def encode(self): - """Return a complete INTEGER DER element, fully encoded as a TLV.""" - self.payload = long_to_bytes(self.value) - if bord(self.payload[0])>127: - self.payload = bchr(0x00) + self.payload - return DerObject.encode(self) - - def decode(self, derEle, noLeftOvers=0): - """Decode a complete INTEGER DER element, and re-initializes this - object with it. - - @param derEle A complete INTEGER DER element. It must start with a DER - INTEGER tag. - @param noLeftOvers Indicate whether it is acceptable to complete the - parsing of the DER element and find that not all - bytes in derEle have been used. - @return Index of the first unused byte in the given DER element. - - Raises a ValueError exception if the DER element is not a - valid non-negative INTEGER. - Raises an IndexError exception if the DER element is too short. - """ - tlvLength = DerObject.decode(self, derEle, noLeftOvers) - if self.typeTag!=self.typeTags['INTEGER']: - raise ValueError ("Not a DER INTEGER.") - if bord(self.payload[0])>127: - raise ValueError ("Negative INTEGER.") - self.value = bytes_to_long(self.payload) - return tlvLength - -class DerSequence(DerObject): - """Class to model a SEQUENCE DER element. - - This object behave like a dynamic Python sequence. - Sub-elements that are INTEGERs, look like Python integers. - Any other sub-element is a binary string encoded as the complete DER - sub-element (TLV). 
- """ - - def __init__(self, startSeq=None): - """Initialize the SEQUENCE DER object. Always empty - initially.""" - DerObject.__init__(self, 'SEQUENCE') - if startSeq==None: - self._seq = [] - else: - self._seq = startSeq - - ## A few methods to make it behave like a python sequence - - def __delitem__(self, n): - del self._seq[n] - def __getitem__(self, n): - return self._seq[n] - def __setitem__(self, key, value): - self._seq[key] = value - def __setslice__(self,i,j,sequence): - self._seq[i:j] = sequence - def __delslice__(self,i,j): - del self._seq[i:j] - def __getslice__(self, i, j): - return self._seq[max(0, i):max(0, j)] - def __len__(self): - return len(self._seq) - def append(self, item): - return self._seq.append(item) - - def hasInts(self): - """Return the number of items in this sequence that are numbers.""" - return len(list(filter(isInt, self._seq))) - - def hasOnlyInts(self): - """Return True if all items in this sequence are numbers.""" - return self._seq and self.hasInts()==len(self._seq) - - def encode(self): - """Return the DER encoding for the ASN.1 SEQUENCE, containing - the non-negative integers and longs added to this object. - - Limitation: Raises a ValueError exception if it some elements - in the sequence are neither Python integers nor complete DER INTEGERs. - """ - self.payload = b('') - for item in self._seq: - try: - self.payload += item - except: - try: - self.payload += DerInteger(item).encode() - except: - raise ValueError("Trying to DER encode an unknown object") - return DerObject.encode(self) - - def decode(self, derEle, noLeftOvers=0): - """Decode a complete SEQUENCE DER element, and re-initializes this - object with it. - - @param derEle A complete SEQUENCE DER element. It must start with a DER - SEQUENCE tag. - @param noLeftOvers Indicate whether it is acceptable to complete the - parsing of the DER element and find that not all - bytes in derEle have been used. - @return Index of the first unused byte in the given DER element. - - DER INTEGERs are decoded into Python integers. Any other DER - element is not decoded. Its validity is not checked. - - Raises a ValueError exception if the DER element is not a - valid DER SEQUENCE. - Raises an IndexError exception if the DER element is too short. - """ - - self._seq = [] - try: - tlvLength = DerObject.decode(self, derEle, noLeftOvers) - if self.typeTag!=self.typeTags['SEQUENCE']: - raise ValueError("Not a DER SEQUENCE.") - # Scan one TLV at once - idx = 0 - while idx= 5 to avoid timing attack vulnerability.", PowmInsecureWarning) - -# New functions -from ._number_new import * - -# Commented out and replaced with faster versions below -## def long2str(n): -## s='' -## while n>0: -## s=chr(n & 255)+s -## n=n>>8 -## return s - -## import types -## def str2long(s): -## if type(s)!=types.StringType: return s # Integers will be left alone -## return reduce(lambda x,y : x*256+ord(y), s, 0L) - -def size (N): - """size(N:long) : int - Returns the size of the number N in bits. - """ - bits = 0 - while N >> bits: - bits += 1 - return bits - -def getRandomNumber(N, randfunc=None): - """Deprecated. Use getRandomInteger or getRandomNBitInteger instead.""" - warnings.warn("Crypto.Util.number.getRandomNumber has confusing semantics"+ - "and has been deprecated. 
Use getRandomInteger or getRandomNBitInteger instead.", - GetRandomNumber_DeprecationWarning) - return getRandomNBitInteger(N, randfunc) - -def getRandomInteger(N, randfunc=None): - """getRandomInteger(N:int, randfunc:callable):long - Return a random number with at most N bits. - - If randfunc is omitted, then Random.new().read is used. - - This function is for internal use only and may be renamed or removed in - the future. - """ - if randfunc is None: - _import_Random() - randfunc = Random.new().read - - S = randfunc(N>>3) - odd_bits = N % 8 - if odd_bits != 0: - char = ord(randfunc(1)) >> (8-odd_bits) - S = bchr(char) + S - value = bytes_to_long(S) - return value - -def getRandomRange(a, b, randfunc=None): - """getRandomRange(a:int, b:int, randfunc:callable):long - Return a random number n so that a <= n < b. - - If randfunc is omitted, then Random.new().read is used. - - This function is for internal use only and may be renamed or removed in - the future. - """ - range_ = b - a - 1 - bits = size(range_) - value = getRandomInteger(bits, randfunc) - while value > range_: - value = getRandomInteger(bits, randfunc) - return a + value - -def getRandomNBitInteger(N, randfunc=None): - """getRandomInteger(N:int, randfunc:callable):long - Return a random number with exactly N-bits, i.e. a random number - between 2**(N-1) and (2**N)-1. - - If randfunc is omitted, then Random.new().read is used. - - This function is for internal use only and may be renamed or removed in - the future. - """ - value = getRandomInteger (N-1, randfunc) - value |= 2 ** (N-1) # Ensure high bit is set - assert size(value) >= N - return value - -def GCD(x,y): - """GCD(x:long, y:long): long - Return the GCD of x and y. - """ - x = abs(x) ; y = abs(y) - while x > 0: - x, y = y % x, x - return y - -def inverse(u, v): - """inverse(u:long, v:long):long - Return the inverse of u mod v. - """ - u3, v3 = int(u), int(v) - u1, v1 = 1, 0 - while v3 > 0: - q=divmod(u3, v3)[0] - u1, v1 = v1, u1 - v1*q - u3, v3 = v3, u3 - v3*q - while u1<0: - u1 = u1 + v - return u1 - -# Given a number of bits to generate and a random generation function, -# find a prime number of the appropriate size. - -def getPrime(N, randfunc=None): - """getPrime(N:int, randfunc:callable):long - Return a random N-bit prime number. - - If randfunc is omitted, then Random.new().read is used. - """ - if randfunc is None: - _import_Random() - randfunc = Random.new().read - - number=getRandomNBitInteger(N, randfunc) | 1 - while (not isPrime(number, randfunc=randfunc)): - number=number+2 - return number - - -def _rabinMillerTest(n, rounds, randfunc=None): - """_rabinMillerTest(n:long, rounds:int, randfunc:callable):int - Tests if n is prime. - Returns 0 when n is definitly composite. - Returns 1 when n is probably prime. - Returns 2 when n is definitly prime. - - If randfunc is omitted, then Random.new().read is used. - - This function is for internal use only and may be renamed or removed in - the future. - """ - # check special cases (n==2, n even, n < 2) - if n < 3 or (n & 1) == 0: - return n == 2 - # n might be very large so it might be beneficial to precalculate n-1 - n_1 = n - 1 - # determine m and b so that 2**b * m = n - 1 and b maximal - b = 0 - m = n_1 - while (m & 1) == 0: - b += 1 - m >>= 1 - - tested = [] - # we need to do at most n-2 rounds. 
- for i in range (min (rounds, n-2)): - # randomly choose a < n and make sure it hasn't been tested yet - a = getRandomRange (2, n, randfunc) - while a in tested: - a = getRandomRange (2, n, randfunc) - tested.append (a) - # do the rabin-miller test - z = pow (a, m, n) # (a**m) % n - if z == 1 or z == n_1: - continue - composite = 1 - for r in range (b): - z = (z * z) % n - if z == 1: - return 0 - elif z == n_1: - composite = 0 - break - if composite: - return 0 - return 1 - -def getStrongPrime(N, e=0, false_positive_prob=1e-6, randfunc=None): - """getStrongPrime(N:int, e:int, false_positive_prob:float, randfunc:callable):long - Return a random strong N-bit prime number. - In this context p is a strong prime if p-1 and p+1 have at - least one large prime factor. - N should be a multiple of 128 and > 512. - - If e is provided the returned prime p-1 will be coprime to e - and thus suitable for RSA where e is the public exponent. - - The optional false_positive_prob is the statistical probability - that true is returned even though it is not (pseudo-prime). - It defaults to 1e-6 (less than 1:1000000). - Note that the real probability of a false-positive is far less. This is - just the mathematically provable limit. - - randfunc should take a single int parameter and return that - many random bytes as a string. - If randfunc is omitted, then Random.new().read is used. - """ - # This function was implemented following the - # instructions found in the paper: - # "FAST GENERATION OF RANDOM, STRONG RSA PRIMES" - # by Robert D. Silverman - # RSA Laboratories - # May 17, 1997 - # which by the time of writing could be freely downloaded here: - # http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.17.2713&rep=rep1&type=pdf - - # Use the accelerator if available - if _fastmath is not None: - return _fastmath.getStrongPrime(int(N), int(e), false_positive_prob, - randfunc) - - if (N < 512) or ((N % 128) != 0): - raise ValueError ("bits must be multiple of 128 and > 512") - - rabin_miller_rounds = int(math.ceil(-math.log(false_positive_prob)/math.log(4))) - - # calculate range for X - # lower_bound = sqrt(2) * 2^{511 + 128*x} - # upper_bound = 2^{512 + 128*x} - 1 - x = (N - 512) >> 7; - # We need to approximate the sqrt(2) in the lower_bound by an integer - # expression because floating point math overflows with these numbers - lower_bound = divmod(14142135623730950489 * (2 ** (511 + 128*x)), - 10000000000000000000)[0] - upper_bound = (1 << (512 + 128*x)) - 1 - # Randomly choose X in calculated range - X = getRandomRange (lower_bound, upper_bound, randfunc) - - # generate p1 and p2 - p = [0, 0] - for i in (0, 1): - # randomly choose 101-bit y - y = getRandomNBitInteger (101, randfunc) - # initialize the field for sieving - field = [0] * 5 * len (sieve_base) - # sieve the field - for prime in sieve_base: - offset = y % prime - for j in range ((prime - offset) % prime, len (field), prime): - field[j] = 1 - - # look for suitable p[i] starting at y - result = 0 - for j in range(len(field)): - composite = field[j] - # look for next canidate - if composite: - continue - tmp = y + j - result = _rabinMillerTest (tmp, rabin_miller_rounds) - if result > 0: - p[i] = tmp - break - if result == 0: - raise RuntimeError ("Couln't find prime in field. 
" - "Developer: Increase field_size") - - # Calculate R - # R = (p2^{-1} mod p1) * p2 - (p1^{-1} mod p2) * p1 - tmp1 = inverse (p[1], p[0]) * p[1] # (p2^-1 mod p1)*p2 - tmp2 = inverse (p[0], p[1]) * p[0] # (p1^-1 mod p2)*p1 - R = tmp1 - tmp2 # (p2^-1 mod p1)*p2 - (p1^-1 mod p2)*p1 - - # search for final prime number starting by Y0 - # Y0 = X + (R - X mod p1p2) - increment = p[0] * p[1] - X = X + (R - (X % increment)) - while 1: - is_possible_prime = 1 - # first check candidate against sieve_base - for prime in sieve_base: - if (X % prime) == 0: - is_possible_prime = 0 - break - # if e is given make sure that e and X-1 are coprime - # this is not necessarily a strong prime criterion but useful when - # creating them for RSA where the p-1 and q-1 should be coprime to - # the public exponent e - if e and is_possible_prime: - if e & 1: - if GCD (e, X-1) != 1: - is_possible_prime = 0 - else: - if GCD (e, divmod((X-1),2)[0]) != 1: - is_possible_prime = 0 - - # do some Rabin-Miller-Tests - if is_possible_prime: - result = _rabinMillerTest (X, rabin_miller_rounds) - if result > 0: - break - X += increment - # abort when X has more bits than requested - # TODO: maybe we shouldn't abort but rather start over. - if X >= 1 << N: - raise RuntimeError ("Couln't find prime in field. " - "Developer: Increase field_size") - return X - -def isPrime(N, false_positive_prob=1e-6, randfunc=None): - """isPrime(N:long, false_positive_prob:float, randfunc:callable):bool - Return true if N is prime. - - The optional false_positive_prob is the statistical probability - that true is returned even though it is not (pseudo-prime). - It defaults to 1e-6 (less than 1:1000000). - Note that the real probability of a false-positive is far less. This is - just the mathematically provable limit. - - If randfunc is omitted, then Random.new().read is used. - """ - if _fastmath is not None: - return _fastmath.isPrime(int(N), false_positive_prob, randfunc) - - if N < 3 or N & 1 == 0: - return N == 2 - for p in sieve_base: - if N == p: - return 1 - if N % p == 0: - return 0 - - rounds = int(math.ceil(-math.log(false_positive_prob)/math.log(4))) - return _rabinMillerTest(N, rounds, randfunc) - - -# Improved conversion functions contributed by Barry Warsaw, after -# careful benchmarking - -import struct - -def long_to_bytes(n, blocksize=0): - """long_to_bytes(n:long, blocksize:int) : string - Convert a long integer to a byte string. - - If optional blocksize is given and greater than zero, pad the front of the - byte string with binary zeros so that the length is a multiple of - blocksize. - """ - # after much testing, this algorithm was deemed to be the fastest - s = b('') - n = int(n) - pack = struct.pack - while n > 0: - s = pack('>I', n & 0xffffffff) + s - n = n >> 32 - # strip off leading zeros - for i in range(len(s)): - if s[i] != b('\000')[0]: - break - else: - # only happens when n == 0 - s = b('\000') - i = 0 - s = s[i:] - # add back some pad bytes. this could be done more efficiently w.r.t. the - # de-padding being done above, but sigh... - if blocksize > 0 and len(s) % blocksize: - s = (blocksize - len(s) % blocksize) * b('\000') + s - return s - -def bytes_to_long(s): - """bytes_to_long(string) : long - Convert a byte string to a long integer. - - This is (essentially) the inverse of long_to_bytes(). 
- """ - acc = 0 - unpack = struct.unpack - length = len(s) - if length % 4: - extra = (4 - length % 4) - s = b('\000') * extra + s - length = length + extra - for i in range(0, length, 4): - acc = (acc << 32) + unpack('>I', s[i:i+4])[0] - return acc - -# For backwards compatibility... -import warnings -def long2str(n, blocksize=0): - warnings.warn("long2str() has been replaced by long_to_bytes()") - return long_to_bytes(n, blocksize) -def str2long(s): - warnings.warn("str2long() has been replaced by bytes_to_long()") - return bytes_to_long(s) - -def _import_Random(): - # This is called in a function instead of at the module level in order to - # avoid problems with recursive imports - global Random, StrongRandom - from Crypto import Random - from Crypto.Random.random import StrongRandom - - - -# The first 10000 primes used for checking primality. -# This should be enough to eliminate most of the odd -# numbers before needing to do a Rabin-Miller test at all. -sieve_base = ( - 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, - 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, - 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, - 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, - 179, 181, 191, 193, 197, 199, 211, 223, 227, 229, - 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, - 283, 293, 307, 311, 313, 317, 331, 337, 347, 349, - 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, - 419, 421, 431, 433, 439, 443, 449, 457, 461, 463, - 467, 479, 487, 491, 499, 503, 509, 521, 523, 541, - 547, 557, 563, 569, 571, 577, 587, 593, 599, 601, - 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, - 661, 673, 677, 683, 691, 701, 709, 719, 727, 733, - 739, 743, 751, 757, 761, 769, 773, 787, 797, 809, - 811, 821, 823, 827, 829, 839, 853, 857, 859, 863, - 877, 881, 883, 887, 907, 911, 919, 929, 937, 941, - 947, 953, 967, 971, 977, 983, 991, 997, 1009, 1013, - 1019, 1021, 1031, 1033, 1039, 1049, 1051, 1061, 1063, 1069, - 1087, 1091, 1093, 1097, 1103, 1109, 1117, 1123, 1129, 1151, - 1153, 1163, 1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223, - 1229, 1231, 1237, 1249, 1259, 1277, 1279, 1283, 1289, 1291, - 1297, 1301, 1303, 1307, 1319, 1321, 1327, 1361, 1367, 1373, - 1381, 1399, 1409, 1423, 1427, 1429, 1433, 1439, 1447, 1451, - 1453, 1459, 1471, 1481, 1483, 1487, 1489, 1493, 1499, 1511, - 1523, 1531, 1543, 1549, 1553, 1559, 1567, 1571, 1579, 1583, - 1597, 1601, 1607, 1609, 1613, 1619, 1621, 1627, 1637, 1657, - 1663, 1667, 1669, 1693, 1697, 1699, 1709, 1721, 1723, 1733, - 1741, 1747, 1753, 1759, 1777, 1783, 1787, 1789, 1801, 1811, - 1823, 1831, 1847, 1861, 1867, 1871, 1873, 1877, 1879, 1889, - 1901, 1907, 1913, 1931, 1933, 1949, 1951, 1973, 1979, 1987, - 1993, 1997, 1999, 2003, 2011, 2017, 2027, 2029, 2039, 2053, - 2063, 2069, 2081, 2083, 2087, 2089, 2099, 2111, 2113, 2129, - 2131, 2137, 2141, 2143, 2153, 2161, 2179, 2203, 2207, 2213, - 2221, 2237, 2239, 2243, 2251, 2267, 2269, 2273, 2281, 2287, - 2293, 2297, 2309, 2311, 2333, 2339, 2341, 2347, 2351, 2357, - 2371, 2377, 2381, 2383, 2389, 2393, 2399, 2411, 2417, 2423, - 2437, 2441, 2447, 2459, 2467, 2473, 2477, 2503, 2521, 2531, - 2539, 2543, 2549, 2551, 2557, 2579, 2591, 2593, 2609, 2617, - 2621, 2633, 2647, 2657, 2659, 2663, 2671, 2677, 2683, 2687, - 2689, 2693, 2699, 2707, 2711, 2713, 2719, 2729, 2731, 2741, - 2749, 2753, 2767, 2777, 2789, 2791, 2797, 2801, 2803, 2819, - 2833, 2837, 2843, 2851, 2857, 2861, 2879, 2887, 2897, 2903, - 2909, 2917, 2927, 2939, 2953, 2957, 2963, 2969, 2971, 2999, - 3001, 3011, 3019, 3023, 3037, 3041, 3049, 3061, 3067, 3079, - 3083, 3089, 3109, 3119, 3121, 
3137, 3163, 3167, 3169, 3181, - 3187, 3191, 3203, 3209, 3217, 3221, 3229, 3251, 3253, 3257, - 3259, 3271, 3299, 3301, 3307, 3313, 3319, 3323, 3329, 3331, - 3343, 3347, 3359, 3361, 3371, 3373, 3389, 3391, 3407, 3413, - 3433, 3449, 3457, 3461, 3463, 3467, 3469, 3491, 3499, 3511, - 3517, 3527, 3529, 3533, 3539, 3541, 3547, 3557, 3559, 3571, - 3581, 3583, 3593, 3607, 3613, 3617, 3623, 3631, 3637, 3643, - 3659, 3671, 3673, 3677, 3691, 3697, 3701, 3709, 3719, 3727, - 3733, 3739, 3761, 3767, 3769, 3779, 3793, 3797, 3803, 3821, - 3823, 3833, 3847, 3851, 3853, 3863, 3877, 3881, 3889, 3907, - 3911, 3917, 3919, 3923, 3929, 3931, 3943, 3947, 3967, 3989, - 4001, 4003, 4007, 4013, 4019, 4021, 4027, 4049, 4051, 4057, - 4073, 4079, 4091, 4093, 4099, 4111, 4127, 4129, 4133, 4139, - 4153, 4157, 4159, 4177, 4201, 4211, 4217, 4219, 4229, 4231, - 4241, 4243, 4253, 4259, 4261, 4271, 4273, 4283, 4289, 4297, - 4327, 4337, 4339, 4349, 4357, 4363, 4373, 4391, 4397, 4409, - 4421, 4423, 4441, 4447, 4451, 4457, 4463, 4481, 4483, 4493, - 4507, 4513, 4517, 4519, 4523, 4547, 4549, 4561, 4567, 4583, - 4591, 4597, 4603, 4621, 4637, 4639, 4643, 4649, 4651, 4657, - 4663, 4673, 4679, 4691, 4703, 4721, 4723, 4729, 4733, 4751, - 4759, 4783, 4787, 4789, 4793, 4799, 4801, 4813, 4817, 4831, - 4861, 4871, 4877, 4889, 4903, 4909, 4919, 4931, 4933, 4937, - 4943, 4951, 4957, 4967, 4969, 4973, 4987, 4993, 4999, 5003, - 5009, 5011, 5021, 5023, 5039, 5051, 5059, 5077, 5081, 5087, - 5099, 5101, 5107, 5113, 5119, 5147, 5153, 5167, 5171, 5179, - 5189, 5197, 5209, 5227, 5231, 5233, 5237, 5261, 5273, 5279, - 5281, 5297, 5303, 5309, 5323, 5333, 5347, 5351, 5381, 5387, - 5393, 5399, 5407, 5413, 5417, 5419, 5431, 5437, 5441, 5443, - 5449, 5471, 5477, 5479, 5483, 5501, 5503, 5507, 5519, 5521, - 5527, 5531, 5557, 5563, 5569, 5573, 5581, 5591, 5623, 5639, - 5641, 5647, 5651, 5653, 5657, 5659, 5669, 5683, 5689, 5693, - 5701, 5711, 5717, 5737, 5741, 5743, 5749, 5779, 5783, 5791, - 5801, 5807, 5813, 5821, 5827, 5839, 5843, 5849, 5851, 5857, - 5861, 5867, 5869, 5879, 5881, 5897, 5903, 5923, 5927, 5939, - 5953, 5981, 5987, 6007, 6011, 6029, 6037, 6043, 6047, 6053, - 6067, 6073, 6079, 6089, 6091, 6101, 6113, 6121, 6131, 6133, - 6143, 6151, 6163, 6173, 6197, 6199, 6203, 6211, 6217, 6221, - 6229, 6247, 6257, 6263, 6269, 6271, 6277, 6287, 6299, 6301, - 6311, 6317, 6323, 6329, 6337, 6343, 6353, 6359, 6361, 6367, - 6373, 6379, 6389, 6397, 6421, 6427, 6449, 6451, 6469, 6473, - 6481, 6491, 6521, 6529, 6547, 6551, 6553, 6563, 6569, 6571, - 6577, 6581, 6599, 6607, 6619, 6637, 6653, 6659, 6661, 6673, - 6679, 6689, 6691, 6701, 6703, 6709, 6719, 6733, 6737, 6761, - 6763, 6779, 6781, 6791, 6793, 6803, 6823, 6827, 6829, 6833, - 6841, 6857, 6863, 6869, 6871, 6883, 6899, 6907, 6911, 6917, - 6947, 6949, 6959, 6961, 6967, 6971, 6977, 6983, 6991, 6997, - 7001, 7013, 7019, 7027, 7039, 7043, 7057, 7069, 7079, 7103, - 7109, 7121, 7127, 7129, 7151, 7159, 7177, 7187, 7193, 7207, - 7211, 7213, 7219, 7229, 7237, 7243, 7247, 7253, 7283, 7297, - 7307, 7309, 7321, 7331, 7333, 7349, 7351, 7369, 7393, 7411, - 7417, 7433, 7451, 7457, 7459, 7477, 7481, 7487, 7489, 7499, - 7507, 7517, 7523, 7529, 7537, 7541, 7547, 7549, 7559, 7561, - 7573, 7577, 7583, 7589, 7591, 7603, 7607, 7621, 7639, 7643, - 7649, 7669, 7673, 7681, 7687, 7691, 7699, 7703, 7717, 7723, - 7727, 7741, 7753, 7757, 7759, 7789, 7793, 7817, 7823, 7829, - 7841, 7853, 7867, 7873, 7877, 7879, 7883, 7901, 7907, 7919, - 7927, 7933, 7937, 7949, 7951, 7963, 7993, 8009, 8011, 8017, - 8039, 8053, 8059, 8069, 8081, 8087, 8089, 8093, 
8101, 8111, - 8117, 8123, 8147, 8161, 8167, 8171, 8179, 8191, 8209, 8219, - 8221, 8231, 8233, 8237, 8243, 8263, 8269, 8273, 8287, 8291, - 8293, 8297, 8311, 8317, 8329, 8353, 8363, 8369, 8377, 8387, - 8389, 8419, 8423, 8429, 8431, 8443, 8447, 8461, 8467, 8501, - 8513, 8521, 8527, 8537, 8539, 8543, 8563, 8573, 8581, 8597, - 8599, 8609, 8623, 8627, 8629, 8641, 8647, 8663, 8669, 8677, - 8681, 8689, 8693, 8699, 8707, 8713, 8719, 8731, 8737, 8741, - 8747, 8753, 8761, 8779, 8783, 8803, 8807, 8819, 8821, 8831, - 8837, 8839, 8849, 8861, 8863, 8867, 8887, 8893, 8923, 8929, - 8933, 8941, 8951, 8963, 8969, 8971, 8999, 9001, 9007, 9011, - 9013, 9029, 9041, 9043, 9049, 9059, 9067, 9091, 9103, 9109, - 9127, 9133, 9137, 9151, 9157, 9161, 9173, 9181, 9187, 9199, - 9203, 9209, 9221, 9227, 9239, 9241, 9257, 9277, 9281, 9283, - 9293, 9311, 9319, 9323, 9337, 9341, 9343, 9349, 9371, 9377, - 9391, 9397, 9403, 9413, 9419, 9421, 9431, 9433, 9437, 9439, - 9461, 9463, 9467, 9473, 9479, 9491, 9497, 9511, 9521, 9533, - 9539, 9547, 9551, 9587, 9601, 9613, 9619, 9623, 9629, 9631, - 9643, 9649, 9661, 9677, 9679, 9689, 9697, 9719, 9721, 9733, - 9739, 9743, 9749, 9767, 9769, 9781, 9787, 9791, 9803, 9811, - 9817, 9829, 9833, 9839, 9851, 9857, 9859, 9871, 9883, 9887, - 9901, 9907, 9923, 9929, 9931, 9941, 9949, 9967, 9973, 10007, - 10009, 10037, 10039, 10061, 10067, 10069, 10079, 10091, 10093, 10099, - 10103, 10111, 10133, 10139, 10141, 10151, 10159, 10163, 10169, 10177, - 10181, 10193, 10211, 10223, 10243, 10247, 10253, 10259, 10267, 10271, - 10273, 10289, 10301, 10303, 10313, 10321, 10331, 10333, 10337, 10343, - 10357, 10369, 10391, 10399, 10427, 10429, 10433, 10453, 10457, 10459, - 10463, 10477, 10487, 10499, 10501, 10513, 10529, 10531, 10559, 10567, - 10589, 10597, 10601, 10607, 10613, 10627, 10631, 10639, 10651, 10657, - 10663, 10667, 10687, 10691, 10709, 10711, 10723, 10729, 10733, 10739, - 10753, 10771, 10781, 10789, 10799, 10831, 10837, 10847, 10853, 10859, - 10861, 10867, 10883, 10889, 10891, 10903, 10909, 10937, 10939, 10949, - 10957, 10973, 10979, 10987, 10993, 11003, 11027, 11047, 11057, 11059, - 11069, 11071, 11083, 11087, 11093, 11113, 11117, 11119, 11131, 11149, - 11159, 11161, 11171, 11173, 11177, 11197, 11213, 11239, 11243, 11251, - 11257, 11261, 11273, 11279, 11287, 11299, 11311, 11317, 11321, 11329, - 11351, 11353, 11369, 11383, 11393, 11399, 11411, 11423, 11437, 11443, - 11447, 11467, 11471, 11483, 11489, 11491, 11497, 11503, 11519, 11527, - 11549, 11551, 11579, 11587, 11593, 11597, 11617, 11621, 11633, 11657, - 11677, 11681, 11689, 11699, 11701, 11717, 11719, 11731, 11743, 11777, - 11779, 11783, 11789, 11801, 11807, 11813, 11821, 11827, 11831, 11833, - 11839, 11863, 11867, 11887, 11897, 11903, 11909, 11923, 11927, 11933, - 11939, 11941, 11953, 11959, 11969, 11971, 11981, 11987, 12007, 12011, - 12037, 12041, 12043, 12049, 12071, 12073, 12097, 12101, 12107, 12109, - 12113, 12119, 12143, 12149, 12157, 12161, 12163, 12197, 12203, 12211, - 12227, 12239, 12241, 12251, 12253, 12263, 12269, 12277, 12281, 12289, - 12301, 12323, 12329, 12343, 12347, 12373, 12377, 12379, 12391, 12401, - 12409, 12413, 12421, 12433, 12437, 12451, 12457, 12473, 12479, 12487, - 12491, 12497, 12503, 12511, 12517, 12527, 12539, 12541, 12547, 12553, - 12569, 12577, 12583, 12589, 12601, 12611, 12613, 12619, 12637, 12641, - 12647, 12653, 12659, 12671, 12689, 12697, 12703, 12713, 12721, 12739, - 12743, 12757, 12763, 12781, 12791, 12799, 12809, 12821, 12823, 12829, - 12841, 12853, 12889, 12893, 12899, 12907, 12911, 12917, 12919, 12923, - 
12941, 12953, 12959, 12967, 12973, 12979, 12983, 13001, 13003, 13007, - 13009, 13033, 13037, 13043, 13049, 13063, 13093, 13099, 13103, 13109, - 13121, 13127, 13147, 13151, 13159, 13163, 13171, 13177, 13183, 13187, - 13217, 13219, 13229, 13241, 13249, 13259, 13267, 13291, 13297, 13309, - 13313, 13327, 13331, 13337, 13339, 13367, 13381, 13397, 13399, 13411, - 13417, 13421, 13441, 13451, 13457, 13463, 13469, 13477, 13487, 13499, - 13513, 13523, 13537, 13553, 13567, 13577, 13591, 13597, 13613, 13619, - 13627, 13633, 13649, 13669, 13679, 13681, 13687, 13691, 13693, 13697, - 13709, 13711, 13721, 13723, 13729, 13751, 13757, 13759, 13763, 13781, - 13789, 13799, 13807, 13829, 13831, 13841, 13859, 13873, 13877, 13879, - 13883, 13901, 13903, 13907, 13913, 13921, 13931, 13933, 13963, 13967, - 13997, 13999, 14009, 14011, 14029, 14033, 14051, 14057, 14071, 14081, - 14083, 14087, 14107, 14143, 14149, 14153, 14159, 14173, 14177, 14197, - 14207, 14221, 14243, 14249, 14251, 14281, 14293, 14303, 14321, 14323, - 14327, 14341, 14347, 14369, 14387, 14389, 14401, 14407, 14411, 14419, - 14423, 14431, 14437, 14447, 14449, 14461, 14479, 14489, 14503, 14519, - 14533, 14537, 14543, 14549, 14551, 14557, 14561, 14563, 14591, 14593, - 14621, 14627, 14629, 14633, 14639, 14653, 14657, 14669, 14683, 14699, - 14713, 14717, 14723, 14731, 14737, 14741, 14747, 14753, 14759, 14767, - 14771, 14779, 14783, 14797, 14813, 14821, 14827, 14831, 14843, 14851, - 14867, 14869, 14879, 14887, 14891, 14897, 14923, 14929, 14939, 14947, - 14951, 14957, 14969, 14983, 15013, 15017, 15031, 15053, 15061, 15073, - 15077, 15083, 15091, 15101, 15107, 15121, 15131, 15137, 15139, 15149, - 15161, 15173, 15187, 15193, 15199, 15217, 15227, 15233, 15241, 15259, - 15263, 15269, 15271, 15277, 15287, 15289, 15299, 15307, 15313, 15319, - 15329, 15331, 15349, 15359, 15361, 15373, 15377, 15383, 15391, 15401, - 15413, 15427, 15439, 15443, 15451, 15461, 15467, 15473, 15493, 15497, - 15511, 15527, 15541, 15551, 15559, 15569, 15581, 15583, 15601, 15607, - 15619, 15629, 15641, 15643, 15647, 15649, 15661, 15667, 15671, 15679, - 15683, 15727, 15731, 15733, 15737, 15739, 15749, 15761, 15767, 15773, - 15787, 15791, 15797, 15803, 15809, 15817, 15823, 15859, 15877, 15881, - 15887, 15889, 15901, 15907, 15913, 15919, 15923, 15937, 15959, 15971, - 15973, 15991, 16001, 16007, 16033, 16057, 16061, 16063, 16067, 16069, - 16073, 16087, 16091, 16097, 16103, 16111, 16127, 16139, 16141, 16183, - 16187, 16189, 16193, 16217, 16223, 16229, 16231, 16249, 16253, 16267, - 16273, 16301, 16319, 16333, 16339, 16349, 16361, 16363, 16369, 16381, - 16411, 16417, 16421, 16427, 16433, 16447, 16451, 16453, 16477, 16481, - 16487, 16493, 16519, 16529, 16547, 16553, 16561, 16567, 16573, 16603, - 16607, 16619, 16631, 16633, 16649, 16651, 16657, 16661, 16673, 16691, - 16693, 16699, 16703, 16729, 16741, 16747, 16759, 16763, 16787, 16811, - 16823, 16829, 16831, 16843, 16871, 16879, 16883, 16889, 16901, 16903, - 16921, 16927, 16931, 16937, 16943, 16963, 16979, 16981, 16987, 16993, - 17011, 17021, 17027, 17029, 17033, 17041, 17047, 17053, 17077, 17093, - 17099, 17107, 17117, 17123, 17137, 17159, 17167, 17183, 17189, 17191, - 17203, 17207, 17209, 17231, 17239, 17257, 17291, 17293, 17299, 17317, - 17321, 17327, 17333, 17341, 17351, 17359, 17377, 17383, 17387, 17389, - 17393, 17401, 17417, 17419, 17431, 17443, 17449, 17467, 17471, 17477, - 17483, 17489, 17491, 17497, 17509, 17519, 17539, 17551, 17569, 17573, - 17579, 17581, 17597, 17599, 17609, 17623, 17627, 17657, 17659, 17669, - 17681, 17683, 17707, 
17713, 17729, 17737, 17747, 17749, 17761, 17783, - 17789, 17791, 17807, 17827, 17837, 17839, 17851, 17863, 17881, 17891, - 17903, 17909, 17911, 17921, 17923, 17929, 17939, 17957, 17959, 17971, - 17977, 17981, 17987, 17989, 18013, 18041, 18043, 18047, 18049, 18059, - 18061, 18077, 18089, 18097, 18119, 18121, 18127, 18131, 18133, 18143, - 18149, 18169, 18181, 18191, 18199, 18211, 18217, 18223, 18229, 18233, - 18251, 18253, 18257, 18269, 18287, 18289, 18301, 18307, 18311, 18313, - 18329, 18341, 18353, 18367, 18371, 18379, 18397, 18401, 18413, 18427, - 18433, 18439, 18443, 18451, 18457, 18461, 18481, 18493, 18503, 18517, - 18521, 18523, 18539, 18541, 18553, 18583, 18587, 18593, 18617, 18637, - 18661, 18671, 18679, 18691, 18701, 18713, 18719, 18731, 18743, 18749, - 18757, 18773, 18787, 18793, 18797, 18803, 18839, 18859, 18869, 18899, - 18911, 18913, 18917, 18919, 18947, 18959, 18973, 18979, 19001, 19009, - 19013, 19031, 19037, 19051, 19069, 19073, 19079, 19081, 19087, 19121, - 19139, 19141, 19157, 19163, 19181, 19183, 19207, 19211, 19213, 19219, - 19231, 19237, 19249, 19259, 19267, 19273, 19289, 19301, 19309, 19319, - 19333, 19373, 19379, 19381, 19387, 19391, 19403, 19417, 19421, 19423, - 19427, 19429, 19433, 19441, 19447, 19457, 19463, 19469, 19471, 19477, - 19483, 19489, 19501, 19507, 19531, 19541, 19543, 19553, 19559, 19571, - 19577, 19583, 19597, 19603, 19609, 19661, 19681, 19687, 19697, 19699, - 19709, 19717, 19727, 19739, 19751, 19753, 19759, 19763, 19777, 19793, - 19801, 19813, 19819, 19841, 19843, 19853, 19861, 19867, 19889, 19891, - 19913, 19919, 19927, 19937, 19949, 19961, 19963, 19973, 19979, 19991, - 19993, 19997, 20011, 20021, 20023, 20029, 20047, 20051, 20063, 20071, - 20089, 20101, 20107, 20113, 20117, 20123, 20129, 20143, 20147, 20149, - 20161, 20173, 20177, 20183, 20201, 20219, 20231, 20233, 20249, 20261, - 20269, 20287, 20297, 20323, 20327, 20333, 20341, 20347, 20353, 20357, - 20359, 20369, 20389, 20393, 20399, 20407, 20411, 20431, 20441, 20443, - 20477, 20479, 20483, 20507, 20509, 20521, 20533, 20543, 20549, 20551, - 20563, 20593, 20599, 20611, 20627, 20639, 20641, 20663, 20681, 20693, - 20707, 20717, 20719, 20731, 20743, 20747, 20749, 20753, 20759, 20771, - 20773, 20789, 20807, 20809, 20849, 20857, 20873, 20879, 20887, 20897, - 20899, 20903, 20921, 20929, 20939, 20947, 20959, 20963, 20981, 20983, - 21001, 21011, 21013, 21017, 21019, 21023, 21031, 21059, 21061, 21067, - 21089, 21101, 21107, 21121, 21139, 21143, 21149, 21157, 21163, 21169, - 21179, 21187, 21191, 21193, 21211, 21221, 21227, 21247, 21269, 21277, - 21283, 21313, 21317, 21319, 21323, 21341, 21347, 21377, 21379, 21383, - 21391, 21397, 21401, 21407, 21419, 21433, 21467, 21481, 21487, 21491, - 21493, 21499, 21503, 21517, 21521, 21523, 21529, 21557, 21559, 21563, - 21569, 21577, 21587, 21589, 21599, 21601, 21611, 21613, 21617, 21647, - 21649, 21661, 21673, 21683, 21701, 21713, 21727, 21737, 21739, 21751, - 21757, 21767, 21773, 21787, 21799, 21803, 21817, 21821, 21839, 21841, - 21851, 21859, 21863, 21871, 21881, 21893, 21911, 21929, 21937, 21943, - 21961, 21977, 21991, 21997, 22003, 22013, 22027, 22031, 22037, 22039, - 22051, 22063, 22067, 22073, 22079, 22091, 22093, 22109, 22111, 22123, - 22129, 22133, 22147, 22153, 22157, 22159, 22171, 22189, 22193, 22229, - 22247, 22259, 22271, 22273, 22277, 22279, 22283, 22291, 22303, 22307, - 22343, 22349, 22367, 22369, 22381, 22391, 22397, 22409, 22433, 22441, - 22447, 22453, 22469, 22481, 22483, 22501, 22511, 22531, 22541, 22543, - 22549, 22567, 22571, 22573, 22613, 22619, 
22621, 22637, 22639, 22643, - 22651, 22669, 22679, 22691, 22697, 22699, 22709, 22717, 22721, 22727, - 22739, 22741, 22751, 22769, 22777, 22783, 22787, 22807, 22811, 22817, - 22853, 22859, 22861, 22871, 22877, 22901, 22907, 22921, 22937, 22943, - 22961, 22963, 22973, 22993, 23003, 23011, 23017, 23021, 23027, 23029, - 23039, 23041, 23053, 23057, 23059, 23063, 23071, 23081, 23087, 23099, - 23117, 23131, 23143, 23159, 23167, 23173, 23189, 23197, 23201, 23203, - 23209, 23227, 23251, 23269, 23279, 23291, 23293, 23297, 23311, 23321, - 23327, 23333, 23339, 23357, 23369, 23371, 23399, 23417, 23431, 23447, - 23459, 23473, 23497, 23509, 23531, 23537, 23539, 23549, 23557, 23561, - 23563, 23567, 23581, 23593, 23599, 23603, 23609, 23623, 23627, 23629, - 23633, 23663, 23669, 23671, 23677, 23687, 23689, 23719, 23741, 23743, - 23747, 23753, 23761, 23767, 23773, 23789, 23801, 23813, 23819, 23827, - 23831, 23833, 23857, 23869, 23873, 23879, 23887, 23893, 23899, 23909, - 23911, 23917, 23929, 23957, 23971, 23977, 23981, 23993, 24001, 24007, - 24019, 24023, 24029, 24043, 24049, 24061, 24071, 24077, 24083, 24091, - 24097, 24103, 24107, 24109, 24113, 24121, 24133, 24137, 24151, 24169, - 24179, 24181, 24197, 24203, 24223, 24229, 24239, 24247, 24251, 24281, - 24317, 24329, 24337, 24359, 24371, 24373, 24379, 24391, 24407, 24413, - 24419, 24421, 24439, 24443, 24469, 24473, 24481, 24499, 24509, 24517, - 24527, 24533, 24547, 24551, 24571, 24593, 24611, 24623, 24631, 24659, - 24671, 24677, 24683, 24691, 24697, 24709, 24733, 24749, 24763, 24767, - 24781, 24793, 24799, 24809, 24821, 24841, 24847, 24851, 24859, 24877, - 24889, 24907, 24917, 24919, 24923, 24943, 24953, 24967, 24971, 24977, - 24979, 24989, 25013, 25031, 25033, 25037, 25057, 25073, 25087, 25097, - 25111, 25117, 25121, 25127, 25147, 25153, 25163, 25169, 25171, 25183, - 25189, 25219, 25229, 25237, 25243, 25247, 25253, 25261, 25301, 25303, - 25307, 25309, 25321, 25339, 25343, 25349, 25357, 25367, 25373, 25391, - 25409, 25411, 25423, 25439, 25447, 25453, 25457, 25463, 25469, 25471, - 25523, 25537, 25541, 25561, 25577, 25579, 25583, 25589, 25601, 25603, - 25609, 25621, 25633, 25639, 25643, 25657, 25667, 25673, 25679, 25693, - 25703, 25717, 25733, 25741, 25747, 25759, 25763, 25771, 25793, 25799, - 25801, 25819, 25841, 25847, 25849, 25867, 25873, 25889, 25903, 25913, - 25919, 25931, 25933, 25939, 25943, 25951, 25969, 25981, 25997, 25999, - 26003, 26017, 26021, 26029, 26041, 26053, 26083, 26099, 26107, 26111, - 26113, 26119, 26141, 26153, 26161, 26171, 26177, 26183, 26189, 26203, - 26209, 26227, 26237, 26249, 26251, 26261, 26263, 26267, 26293, 26297, - 26309, 26317, 26321, 26339, 26347, 26357, 26371, 26387, 26393, 26399, - 26407, 26417, 26423, 26431, 26437, 26449, 26459, 26479, 26489, 26497, - 26501, 26513, 26539, 26557, 26561, 26573, 26591, 26597, 26627, 26633, - 26641, 26647, 26669, 26681, 26683, 26687, 26693, 26699, 26701, 26711, - 26713, 26717, 26723, 26729, 26731, 26737, 26759, 26777, 26783, 26801, - 26813, 26821, 26833, 26839, 26849, 26861, 26863, 26879, 26881, 26891, - 26893, 26903, 26921, 26927, 26947, 26951, 26953, 26959, 26981, 26987, - 26993, 27011, 27017, 27031, 27043, 27059, 27061, 27067, 27073, 27077, - 27091, 27103, 27107, 27109, 27127, 27143, 27179, 27191, 27197, 27211, - 27239, 27241, 27253, 27259, 27271, 27277, 27281, 27283, 27299, 27329, - 27337, 27361, 27367, 27397, 27407, 27409, 27427, 27431, 27437, 27449, - 27457, 27479, 27481, 27487, 27509, 27527, 27529, 27539, 27541, 27551, - 27581, 27583, 27611, 27617, 27631, 27647, 27653, 27673, 27689, 
27691, - 27697, 27701, 27733, 27737, 27739, 27743, 27749, 27751, 27763, 27767, - 27773, 27779, 27791, 27793, 27799, 27803, 27809, 27817, 27823, 27827, - 27847, 27851, 27883, 27893, 27901, 27917, 27919, 27941, 27943, 27947, - 27953, 27961, 27967, 27983, 27997, 28001, 28019, 28027, 28031, 28051, - 28057, 28069, 28081, 28087, 28097, 28099, 28109, 28111, 28123, 28151, - 28163, 28181, 28183, 28201, 28211, 28219, 28229, 28277, 28279, 28283, - 28289, 28297, 28307, 28309, 28319, 28349, 28351, 28387, 28393, 28403, - 28409, 28411, 28429, 28433, 28439, 28447, 28463, 28477, 28493, 28499, - 28513, 28517, 28537, 28541, 28547, 28549, 28559, 28571, 28573, 28579, - 28591, 28597, 28603, 28607, 28619, 28621, 28627, 28631, 28643, 28649, - 28657, 28661, 28663, 28669, 28687, 28697, 28703, 28711, 28723, 28729, - 28751, 28753, 28759, 28771, 28789, 28793, 28807, 28813, 28817, 28837, - 28843, 28859, 28867, 28871, 28879, 28901, 28909, 28921, 28927, 28933, - 28949, 28961, 28979, 29009, 29017, 29021, 29023, 29027, 29033, 29059, - 29063, 29077, 29101, 29123, 29129, 29131, 29137, 29147, 29153, 29167, - 29173, 29179, 29191, 29201, 29207, 29209, 29221, 29231, 29243, 29251, - 29269, 29287, 29297, 29303, 29311, 29327, 29333, 29339, 29347, 29363, - 29383, 29387, 29389, 29399, 29401, 29411, 29423, 29429, 29437, 29443, - 29453, 29473, 29483, 29501, 29527, 29531, 29537, 29567, 29569, 29573, - 29581, 29587, 29599, 29611, 29629, 29633, 29641, 29663, 29669, 29671, - 29683, 29717, 29723, 29741, 29753, 29759, 29761, 29789, 29803, 29819, - 29833, 29837, 29851, 29863, 29867, 29873, 29879, 29881, 29917, 29921, - 29927, 29947, 29959, 29983, 29989, 30011, 30013, 30029, 30047, 30059, - 30071, 30089, 30091, 30097, 30103, 30109, 30113, 30119, 30133, 30137, - 30139, 30161, 30169, 30181, 30187, 30197, 30203, 30211, 30223, 30241, - 30253, 30259, 30269, 30271, 30293, 30307, 30313, 30319, 30323, 30341, - 30347, 30367, 30389, 30391, 30403, 30427, 30431, 30449, 30467, 30469, - 30491, 30493, 30497, 30509, 30517, 30529, 30539, 30553, 30557, 30559, - 30577, 30593, 30631, 30637, 30643, 30649, 30661, 30671, 30677, 30689, - 30697, 30703, 30707, 30713, 30727, 30757, 30763, 30773, 30781, 30803, - 30809, 30817, 30829, 30839, 30841, 30851, 30853, 30859, 30869, 30871, - 30881, 30893, 30911, 30931, 30937, 30941, 30949, 30971, 30977, 30983, - 31013, 31019, 31033, 31039, 31051, 31063, 31069, 31079, 31081, 31091, - 31121, 31123, 31139, 31147, 31151, 31153, 31159, 31177, 31181, 31183, - 31189, 31193, 31219, 31223, 31231, 31237, 31247, 31249, 31253, 31259, - 31267, 31271, 31277, 31307, 31319, 31321, 31327, 31333, 31337, 31357, - 31379, 31387, 31391, 31393, 31397, 31469, 31477, 31481, 31489, 31511, - 31513, 31517, 31531, 31541, 31543, 31547, 31567, 31573, 31583, 31601, - 31607, 31627, 31643, 31649, 31657, 31663, 31667, 31687, 31699, 31721, - 31723, 31727, 31729, 31741, 31751, 31769, 31771, 31793, 31799, 31817, - 31847, 31849, 31859, 31873, 31883, 31891, 31907, 31957, 31963, 31973, - 31981, 31991, 32003, 32009, 32027, 32029, 32051, 32057, 32059, 32063, - 32069, 32077, 32083, 32089, 32099, 32117, 32119, 32141, 32143, 32159, - 32173, 32183, 32189, 32191, 32203, 32213, 32233, 32237, 32251, 32257, - 32261, 32297, 32299, 32303, 32309, 32321, 32323, 32327, 32341, 32353, - 32359, 32363, 32369, 32371, 32377, 32381, 32401, 32411, 32413, 32423, - 32429, 32441, 32443, 32467, 32479, 32491, 32497, 32503, 32507, 32531, - 32533, 32537, 32561, 32563, 32569, 32573, 32579, 32587, 32603, 32609, - 32611, 32621, 32633, 32647, 32653, 32687, 32693, 32707, 32713, 32717, - 32719, 32749, 
32771, 32779, 32783, 32789, 32797, 32801, 32803, 32831, - 32833, 32839, 32843, 32869, 32887, 32909, 32911, 32917, 32933, 32939, - 32941, 32957, 32969, 32971, 32983, 32987, 32993, 32999, 33013, 33023, - 33029, 33037, 33049, 33053, 33071, 33073, 33083, 33091, 33107, 33113, - 33119, 33149, 33151, 33161, 33179, 33181, 33191, 33199, 33203, 33211, - 33223, 33247, 33287, 33289, 33301, 33311, 33317, 33329, 33331, 33343, - 33347, 33349, 33353, 33359, 33377, 33391, 33403, 33409, 33413, 33427, - 33457, 33461, 33469, 33479, 33487, 33493, 33503, 33521, 33529, 33533, - 33547, 33563, 33569, 33577, 33581, 33587, 33589, 33599, 33601, 33613, - 33617, 33619, 33623, 33629, 33637, 33641, 33647, 33679, 33703, 33713, - 33721, 33739, 33749, 33751, 33757, 33767, 33769, 33773, 33791, 33797, - 33809, 33811, 33827, 33829, 33851, 33857, 33863, 33871, 33889, 33893, - 33911, 33923, 33931, 33937, 33941, 33961, 33967, 33997, 34019, 34031, - 34033, 34039, 34057, 34061, 34123, 34127, 34129, 34141, 34147, 34157, - 34159, 34171, 34183, 34211, 34213, 34217, 34231, 34253, 34259, 34261, - 34267, 34273, 34283, 34297, 34301, 34303, 34313, 34319, 34327, 34337, - 34351, 34361, 34367, 34369, 34381, 34403, 34421, 34429, 34439, 34457, - 34469, 34471, 34483, 34487, 34499, 34501, 34511, 34513, 34519, 34537, - 34543, 34549, 34583, 34589, 34591, 34603, 34607, 34613, 34631, 34649, - 34651, 34667, 34673, 34679, 34687, 34693, 34703, 34721, 34729, 34739, - 34747, 34757, 34759, 34763, 34781, 34807, 34819, 34841, 34843, 34847, - 34849, 34871, 34877, 34883, 34897, 34913, 34919, 34939, 34949, 34961, - 34963, 34981, 35023, 35027, 35051, 35053, 35059, 35069, 35081, 35083, - 35089, 35099, 35107, 35111, 35117, 35129, 35141, 35149, 35153, 35159, - 35171, 35201, 35221, 35227, 35251, 35257, 35267, 35279, 35281, 35291, - 35311, 35317, 35323, 35327, 35339, 35353, 35363, 35381, 35393, 35401, - 35407, 35419, 35423, 35437, 35447, 35449, 35461, 35491, 35507, 35509, - 35521, 35527, 35531, 35533, 35537, 35543, 35569, 35573, 35591, 35593, - 35597, 35603, 35617, 35671, 35677, 35729, 35731, 35747, 35753, 35759, - 35771, 35797, 35801, 35803, 35809, 35831, 35837, 35839, 35851, 35863, - 35869, 35879, 35897, 35899, 35911, 35923, 35933, 35951, 35963, 35969, - 35977, 35983, 35993, 35999, 36007, 36011, 36013, 36017, 36037, 36061, - 36067, 36073, 36083, 36097, 36107, 36109, 36131, 36137, 36151, 36161, - 36187, 36191, 36209, 36217, 36229, 36241, 36251, 36263, 36269, 36277, - 36293, 36299, 36307, 36313, 36319, 36341, 36343, 36353, 36373, 36383, - 36389, 36433, 36451, 36457, 36467, 36469, 36473, 36479, 36493, 36497, - 36523, 36527, 36529, 36541, 36551, 36559, 36563, 36571, 36583, 36587, - 36599, 36607, 36629, 36637, 36643, 36653, 36671, 36677, 36683, 36691, - 36697, 36709, 36713, 36721, 36739, 36749, 36761, 36767, 36779, 36781, - 36787, 36791, 36793, 36809, 36821, 36833, 36847, 36857, 36871, 36877, - 36887, 36899, 36901, 36913, 36919, 36923, 36929, 36931, 36943, 36947, - 36973, 36979, 36997, 37003, 37013, 37019, 37021, 37039, 37049, 37057, - 37061, 37087, 37097, 37117, 37123, 37139, 37159, 37171, 37181, 37189, - 37199, 37201, 37217, 37223, 37243, 37253, 37273, 37277, 37307, 37309, - 37313, 37321, 37337, 37339, 37357, 37361, 37363, 37369, 37379, 37397, - 37409, 37423, 37441, 37447, 37463, 37483, 37489, 37493, 37501, 37507, - 37511, 37517, 37529, 37537, 37547, 37549, 37561, 37567, 37571, 37573, - 37579, 37589, 37591, 37607, 37619, 37633, 37643, 37649, 37657, 37663, - 37691, 37693, 37699, 37717, 37747, 37781, 37783, 37799, 37811, 37813, - 37831, 37847, 37853, 37861, 37871, 
37879, 37889, 37897, 37907, 37951, - 37957, 37963, 37967, 37987, 37991, 37993, 37997, 38011, 38039, 38047, - 38053, 38069, 38083, 38113, 38119, 38149, 38153, 38167, 38177, 38183, - 38189, 38197, 38201, 38219, 38231, 38237, 38239, 38261, 38273, 38281, - 38287, 38299, 38303, 38317, 38321, 38327, 38329, 38333, 38351, 38371, - 38377, 38393, 38431, 38447, 38449, 38453, 38459, 38461, 38501, 38543, - 38557, 38561, 38567, 38569, 38593, 38603, 38609, 38611, 38629, 38639, - 38651, 38653, 38669, 38671, 38677, 38693, 38699, 38707, 38711, 38713, - 38723, 38729, 38737, 38747, 38749, 38767, 38783, 38791, 38803, 38821, - 38833, 38839, 38851, 38861, 38867, 38873, 38891, 38903, 38917, 38921, - 38923, 38933, 38953, 38959, 38971, 38977, 38993, 39019, 39023, 39041, - 39043, 39047, 39079, 39089, 39097, 39103, 39107, 39113, 39119, 39133, - 39139, 39157, 39161, 39163, 39181, 39191, 39199, 39209, 39217, 39227, - 39229, 39233, 39239, 39241, 39251, 39293, 39301, 39313, 39317, 39323, - 39341, 39343, 39359, 39367, 39371, 39373, 39383, 39397, 39409, 39419, - 39439, 39443, 39451, 39461, 39499, 39503, 39509, 39511, 39521, 39541, - 39551, 39563, 39569, 39581, 39607, 39619, 39623, 39631, 39659, 39667, - 39671, 39679, 39703, 39709, 39719, 39727, 39733, 39749, 39761, 39769, - 39779, 39791, 39799, 39821, 39827, 39829, 39839, 39841, 39847, 39857, - 39863, 39869, 39877, 39883, 39887, 39901, 39929, 39937, 39953, 39971, - 39979, 39983, 39989, 40009, 40013, 40031, 40037, 40039, 40063, 40087, - 40093, 40099, 40111, 40123, 40127, 40129, 40151, 40153, 40163, 40169, - 40177, 40189, 40193, 40213, 40231, 40237, 40241, 40253, 40277, 40283, - 40289, 40343, 40351, 40357, 40361, 40387, 40423, 40427, 40429, 40433, - 40459, 40471, 40483, 40487, 40493, 40499, 40507, 40519, 40529, 40531, - 40543, 40559, 40577, 40583, 40591, 40597, 40609, 40627, 40637, 40639, - 40693, 40697, 40699, 40709, 40739, 40751, 40759, 40763, 40771, 40787, - 40801, 40813, 40819, 40823, 40829, 40841, 40847, 40849, 40853, 40867, - 40879, 40883, 40897, 40903, 40927, 40933, 40939, 40949, 40961, 40973, - 40993, 41011, 41017, 41023, 41039, 41047, 41051, 41057, 41077, 41081, - 41113, 41117, 41131, 41141, 41143, 41149, 41161, 41177, 41179, 41183, - 41189, 41201, 41203, 41213, 41221, 41227, 41231, 41233, 41243, 41257, - 41263, 41269, 41281, 41299, 41333, 41341, 41351, 41357, 41381, 41387, - 41389, 41399, 41411, 41413, 41443, 41453, 41467, 41479, 41491, 41507, - 41513, 41519, 41521, 41539, 41543, 41549, 41579, 41593, 41597, 41603, - 41609, 41611, 41617, 41621, 41627, 41641, 41647, 41651, 41659, 41669, - 41681, 41687, 41719, 41729, 41737, 41759, 41761, 41771, 41777, 41801, - 41809, 41813, 41843, 41849, 41851, 41863, 41879, 41887, 41893, 41897, - 41903, 41911, 41927, 41941, 41947, 41953, 41957, 41959, 41969, 41981, - 41983, 41999, 42013, 42017, 42019, 42023, 42043, 42061, 42071, 42073, - 42083, 42089, 42101, 42131, 42139, 42157, 42169, 42179, 42181, 42187, - 42193, 42197, 42209, 42221, 42223, 42227, 42239, 42257, 42281, 42283, - 42293, 42299, 42307, 42323, 42331, 42337, 42349, 42359, 42373, 42379, - 42391, 42397, 42403, 42407, 42409, 42433, 42437, 42443, 42451, 42457, - 42461, 42463, 42467, 42473, 42487, 42491, 42499, 42509, 42533, 42557, - 42569, 42571, 42577, 42589, 42611, 42641, 42643, 42649, 42667, 42677, - 42683, 42689, 42697, 42701, 42703, 42709, 42719, 42727, 42737, 42743, - 42751, 42767, 42773, 42787, 42793, 42797, 42821, 42829, 42839, 42841, - 42853, 42859, 42863, 42899, 42901, 42923, 42929, 42937, 42943, 42953, - 42961, 42967, 42979, 42989, 43003, 43013, 43019, 43037, 
43049, 43051, - 43063, 43067, 43093, 43103, 43117, 43133, 43151, 43159, 43177, 43189, - 43201, 43207, 43223, 43237, 43261, 43271, 43283, 43291, 43313, 43319, - 43321, 43331, 43391, 43397, 43399, 43403, 43411, 43427, 43441, 43451, - 43457, 43481, 43487, 43499, 43517, 43541, 43543, 43573, 43577, 43579, - 43591, 43597, 43607, 43609, 43613, 43627, 43633, 43649, 43651, 43661, - 43669, 43691, 43711, 43717, 43721, 43753, 43759, 43777, 43781, 43783, - 43787, 43789, 43793, 43801, 43853, 43867, 43889, 43891, 43913, 43933, - 43943, 43951, 43961, 43963, 43969, 43973, 43987, 43991, 43997, 44017, - 44021, 44027, 44029, 44041, 44053, 44059, 44071, 44087, 44089, 44101, - 44111, 44119, 44123, 44129, 44131, 44159, 44171, 44179, 44189, 44201, - 44203, 44207, 44221, 44249, 44257, 44263, 44267, 44269, 44273, 44279, - 44281, 44293, 44351, 44357, 44371, 44381, 44383, 44389, 44417, 44449, - 44453, 44483, 44491, 44497, 44501, 44507, 44519, 44531, 44533, 44537, - 44543, 44549, 44563, 44579, 44587, 44617, 44621, 44623, 44633, 44641, - 44647, 44651, 44657, 44683, 44687, 44699, 44701, 44711, 44729, 44741, - 44753, 44771, 44773, 44777, 44789, 44797, 44809, 44819, 44839, 44843, - 44851, 44867, 44879, 44887, 44893, 44909, 44917, 44927, 44939, 44953, - 44959, 44963, 44971, 44983, 44987, 45007, 45013, 45053, 45061, 45077, - 45083, 45119, 45121, 45127, 45131, 45137, 45139, 45161, 45179, 45181, - 45191, 45197, 45233, 45247, 45259, 45263, 45281, 45289, 45293, 45307, - 45317, 45319, 45329, 45337, 45341, 45343, 45361, 45377, 45389, 45403, - 45413, 45427, 45433, 45439, 45481, 45491, 45497, 45503, 45523, 45533, - 45541, 45553, 45557, 45569, 45587, 45589, 45599, 45613, 45631, 45641, - 45659, 45667, 45673, 45677, 45691, 45697, 45707, 45737, 45751, 45757, - 45763, 45767, 45779, 45817, 45821, 45823, 45827, 45833, 45841, 45853, - 45863, 45869, 45887, 45893, 45943, 45949, 45953, 45959, 45971, 45979, - 45989, 46021, 46027, 46049, 46051, 46061, 46073, 46091, 46093, 46099, - 46103, 46133, 46141, 46147, 46153, 46171, 46181, 46183, 46187, 46199, - 46219, 46229, 46237, 46261, 46271, 46273, 46279, 46301, 46307, 46309, - 46327, 46337, 46349, 46351, 46381, 46399, 46411, 46439, 46441, 46447, - 46451, 46457, 46471, 46477, 46489, 46499, 46507, 46511, 46523, 46549, - 46559, 46567, 46573, 46589, 46591, 46601, 46619, 46633, 46639, 46643, - 46649, 46663, 46679, 46681, 46687, 46691, 46703, 46723, 46727, 46747, - 46751, 46757, 46769, 46771, 46807, 46811, 46817, 46819, 46829, 46831, - 46853, 46861, 46867, 46877, 46889, 46901, 46919, 46933, 46957, 46993, - 46997, 47017, 47041, 47051, 47057, 47059, 47087, 47093, 47111, 47119, - 47123, 47129, 47137, 47143, 47147, 47149, 47161, 47189, 47207, 47221, - 47237, 47251, 47269, 47279, 47287, 47293, 47297, 47303, 47309, 47317, - 47339, 47351, 47353, 47363, 47381, 47387, 47389, 47407, 47417, 47419, - 47431, 47441, 47459, 47491, 47497, 47501, 47507, 47513, 47521, 47527, - 47533, 47543, 47563, 47569, 47581, 47591, 47599, 47609, 47623, 47629, - 47639, 47653, 47657, 47659, 47681, 47699, 47701, 47711, 47713, 47717, - 47737, 47741, 47743, 47777, 47779, 47791, 47797, 47807, 47809, 47819, - 47837, 47843, 47857, 47869, 47881, 47903, 47911, 47917, 47933, 47939, - 47947, 47951, 47963, 47969, 47977, 47981, 48017, 48023, 48029, 48049, - 48073, 48079, 48091, 48109, 48119, 48121, 48131, 48157, 48163, 48179, - 48187, 48193, 48197, 48221, 48239, 48247, 48259, 48271, 48281, 48299, - 48311, 48313, 48337, 48341, 48353, 48371, 48383, 48397, 48407, 48409, - 48413, 48437, 48449, 48463, 48473, 48479, 48481, 48487, 48491, 48497, - 48523, 
48527, 48533, 48539, 48541, 48563, 48571, 48589, 48593, 48611, - 48619, 48623, 48647, 48649, 48661, 48673, 48677, 48679, 48731, 48733, - 48751, 48757, 48761, 48767, 48779, 48781, 48787, 48799, 48809, 48817, - 48821, 48823, 48847, 48857, 48859, 48869, 48871, 48883, 48889, 48907, - 48947, 48953, 48973, 48989, 48991, 49003, 49009, 49019, 49031, 49033, - 49037, 49043, 49057, 49069, 49081, 49103, 49109, 49117, 49121, 49123, - 49139, 49157, 49169, 49171, 49177, 49193, 49199, 49201, 49207, 49211, - 49223, 49253, 49261, 49277, 49279, 49297, 49307, 49331, 49333, 49339, - 49363, 49367, 49369, 49391, 49393, 49409, 49411, 49417, 49429, 49433, - 49451, 49459, 49463, 49477, 49481, 49499, 49523, 49529, 49531, 49537, - 49547, 49549, 49559, 49597, 49603, 49613, 49627, 49633, 49639, 49663, - 49667, 49669, 49681, 49697, 49711, 49727, 49739, 49741, 49747, 49757, - 49783, 49787, 49789, 49801, 49807, 49811, 49823, 49831, 49843, 49853, - 49871, 49877, 49891, 49919, 49921, 49927, 49937, 49939, 49943, 49957, - 49991, 49993, 49999, 50021, 50023, 50033, 50047, 50051, 50053, 50069, - 50077, 50087, 50093, 50101, 50111, 50119, 50123, 50129, 50131, 50147, - 50153, 50159, 50177, 50207, 50221, 50227, 50231, 50261, 50263, 50273, - 50287, 50291, 50311, 50321, 50329, 50333, 50341, 50359, 50363, 50377, - 50383, 50387, 50411, 50417, 50423, 50441, 50459, 50461, 50497, 50503, - 50513, 50527, 50539, 50543, 50549, 50551, 50581, 50587, 50591, 50593, - 50599, 50627, 50647, 50651, 50671, 50683, 50707, 50723, 50741, 50753, - 50767, 50773, 50777, 50789, 50821, 50833, 50839, 50849, 50857, 50867, - 50873, 50891, 50893, 50909, 50923, 50929, 50951, 50957, 50969, 50971, - 50989, 50993, 51001, 51031, 51043, 51047, 51059, 51061, 51071, 51109, - 51131, 51133, 51137, 51151, 51157, 51169, 51193, 51197, 51199, 51203, - 51217, 51229, 51239, 51241, 51257, 51263, 51283, 51287, 51307, 51329, - 51341, 51343, 51347, 51349, 51361, 51383, 51407, 51413, 51419, 51421, - 51427, 51431, 51437, 51439, 51449, 51461, 51473, 51479, 51481, 51487, - 51503, 51511, 51517, 51521, 51539, 51551, 51563, 51577, 51581, 51593, - 51599, 51607, 51613, 51631, 51637, 51647, 51659, 51673, 51679, 51683, - 51691, 51713, 51719, 51721, 51749, 51767, 51769, 51787, 51797, 51803, - 51817, 51827, 51829, 51839, 51853, 51859, 51869, 51871, 51893, 51899, - 51907, 51913, 51929, 51941, 51949, 51971, 51973, 51977, 51991, 52009, - 52021, 52027, 52051, 52057, 52067, 52069, 52081, 52103, 52121, 52127, - 52147, 52153, 52163, 52177, 52181, 52183, 52189, 52201, 52223, 52237, - 52249, 52253, 52259, 52267, 52289, 52291, 52301, 52313, 52321, 52361, - 52363, 52369, 52379, 52387, 52391, 52433, 52453, 52457, 52489, 52501, - 52511, 52517, 52529, 52541, 52543, 52553, 52561, 52567, 52571, 52579, - 52583, 52609, 52627, 52631, 52639, 52667, 52673, 52691, 52697, 52709, - 52711, 52721, 52727, 52733, 52747, 52757, 52769, 52783, 52807, 52813, - 52817, 52837, 52859, 52861, 52879, 52883, 52889, 52901, 52903, 52919, - 52937, 52951, 52957, 52963, 52967, 52973, 52981, 52999, 53003, 53017, - 53047, 53051, 53069, 53077, 53087, 53089, 53093, 53101, 53113, 53117, - 53129, 53147, 53149, 53161, 53171, 53173, 53189, 53197, 53201, 53231, - 53233, 53239, 53267, 53269, 53279, 53281, 53299, 53309, 53323, 53327, - 53353, 53359, 53377, 53381, 53401, 53407, 53411, 53419, 53437, 53441, - 53453, 53479, 53503, 53507, 53527, 53549, 53551, 53569, 53591, 53593, - 53597, 53609, 53611, 53617, 53623, 53629, 53633, 53639, 53653, 53657, - 53681, 53693, 53699, 53717, 53719, 53731, 53759, 53773, 53777, 53783, - 53791, 53813, 53819, 53831, 
53849, 53857, 53861, 53881, 53887, 53891, - 53897, 53899, 53917, 53923, 53927, 53939, 53951, 53959, 53987, 53993, - 54001, 54011, 54013, 54037, 54049, 54059, 54083, 54091, 54101, 54121, - 54133, 54139, 54151, 54163, 54167, 54181, 54193, 54217, 54251, 54269, - 54277, 54287, 54293, 54311, 54319, 54323, 54331, 54347, 54361, 54367, - 54371, 54377, 54401, 54403, 54409, 54413, 54419, 54421, 54437, 54443, - 54449, 54469, 54493, 54497, 54499, 54503, 54517, 54521, 54539, 54541, - 54547, 54559, 54563, 54577, 54581, 54583, 54601, 54617, 54623, 54629, - 54631, 54647, 54667, 54673, 54679, 54709, 54713, 54721, 54727, 54751, - 54767, 54773, 54779, 54787, 54799, 54829, 54833, 54851, 54869, 54877, - 54881, 54907, 54917, 54919, 54941, 54949, 54959, 54973, 54979, 54983, - 55001, 55009, 55021, 55049, 55051, 55057, 55061, 55073, 55079, 55103, - 55109, 55117, 55127, 55147, 55163, 55171, 55201, 55207, 55213, 55217, - 55219, 55229, 55243, 55249, 55259, 55291, 55313, 55331, 55333, 55337, - 55339, 55343, 55351, 55373, 55381, 55399, 55411, 55439, 55441, 55457, - 55469, 55487, 55501, 55511, 55529, 55541, 55547, 55579, 55589, 55603, - 55609, 55619, 55621, 55631, 55633, 55639, 55661, 55663, 55667, 55673, - 55681, 55691, 55697, 55711, 55717, 55721, 55733, 55763, 55787, 55793, - 55799, 55807, 55813, 55817, 55819, 55823, 55829, 55837, 55843, 55849, - 55871, 55889, 55897, 55901, 55903, 55921, 55927, 55931, 55933, 55949, - 55967, 55987, 55997, 56003, 56009, 56039, 56041, 56053, 56081, 56087, - 56093, 56099, 56101, 56113, 56123, 56131, 56149, 56167, 56171, 56179, - 56197, 56207, 56209, 56237, 56239, 56249, 56263, 56267, 56269, 56299, - 56311, 56333, 56359, 56369, 56377, 56383, 56393, 56401, 56417, 56431, - 56437, 56443, 56453, 56467, 56473, 56477, 56479, 56489, 56501, 56503, - 56509, 56519, 56527, 56531, 56533, 56543, 56569, 56591, 56597, 56599, - 56611, 56629, 56633, 56659, 56663, 56671, 56681, 56687, 56701, 56711, - 56713, 56731, 56737, 56747, 56767, 56773, 56779, 56783, 56807, 56809, - 56813, 56821, 56827, 56843, 56857, 56873, 56891, 56893, 56897, 56909, - 56911, 56921, 56923, 56929, 56941, 56951, 56957, 56963, 56983, 56989, - 56993, 56999, 57037, 57041, 57047, 57059, 57073, 57077, 57089, 57097, - 57107, 57119, 57131, 57139, 57143, 57149, 57163, 57173, 57179, 57191, - 57193, 57203, 57221, 57223, 57241, 57251, 57259, 57269, 57271, 57283, - 57287, 57301, 57329, 57331, 57347, 57349, 57367, 57373, 57383, 57389, - 57397, 57413, 57427, 57457, 57467, 57487, 57493, 57503, 57527, 57529, - 57557, 57559, 57571, 57587, 57593, 57601, 57637, 57641, 57649, 57653, - 57667, 57679, 57689, 57697, 57709, 57713, 57719, 57727, 57731, 57737, - 57751, 57773, 57781, 57787, 57791, 57793, 57803, 57809, 57829, 57839, - 57847, 57853, 57859, 57881, 57899, 57901, 57917, 57923, 57943, 57947, - 57973, 57977, 57991, 58013, 58027, 58031, 58043, 58049, 58057, 58061, - 58067, 58073, 58099, 58109, 58111, 58129, 58147, 58151, 58153, 58169, - 58171, 58189, 58193, 58199, 58207, 58211, 58217, 58229, 58231, 58237, - 58243, 58271, 58309, 58313, 58321, 58337, 58363, 58367, 58369, 58379, - 58391, 58393, 58403, 58411, 58417, 58427, 58439, 58441, 58451, 58453, - 58477, 58481, 58511, 58537, 58543, 58549, 58567, 58573, 58579, 58601, - 58603, 58613, 58631, 58657, 58661, 58679, 58687, 58693, 58699, 58711, - 58727, 58733, 58741, 58757, 58763, 58771, 58787, 58789, 58831, 58889, - 58897, 58901, 58907, 58909, 58913, 58921, 58937, 58943, 58963, 58967, - 58979, 58991, 58997, 59009, 59011, 59021, 59023, 59029, 59051, 59053, - 59063, 59069, 59077, 59083, 59093, 59107, 59113, 
59119, 59123, 59141, - 59149, 59159, 59167, 59183, 59197, 59207, 59209, 59219, 59221, 59233, - 59239, 59243, 59263, 59273, 59281, 59333, 59341, 59351, 59357, 59359, - 59369, 59377, 59387, 59393, 59399, 59407, 59417, 59419, 59441, 59443, - 59447, 59453, 59467, 59471, 59473, 59497, 59509, 59513, 59539, 59557, - 59561, 59567, 59581, 59611, 59617, 59621, 59627, 59629, 59651, 59659, - 59663, 59669, 59671, 59693, 59699, 59707, 59723, 59729, 59743, 59747, - 59753, 59771, 59779, 59791, 59797, 59809, 59833, 59863, 59879, 59887, - 59921, 59929, 59951, 59957, 59971, 59981, 59999, 60013, 60017, 60029, - 60037, 60041, 60077, 60083, 60089, 60091, 60101, 60103, 60107, 60127, - 60133, 60139, 60149, 60161, 60167, 60169, 60209, 60217, 60223, 60251, - 60257, 60259, 60271, 60289, 60293, 60317, 60331, 60337, 60343, 60353, - 60373, 60383, 60397, 60413, 60427, 60443, 60449, 60457, 60493, 60497, - 60509, 60521, 60527, 60539, 60589, 60601, 60607, 60611, 60617, 60623, - 60631, 60637, 60647, 60649, 60659, 60661, 60679, 60689, 60703, 60719, - 60727, 60733, 60737, 60757, 60761, 60763, 60773, 60779, 60793, 60811, - 60821, 60859, 60869, 60887, 60889, 60899, 60901, 60913, 60917, 60919, - 60923, 60937, 60943, 60953, 60961, 61001, 61007, 61027, 61031, 61043, - 61051, 61057, 61091, 61099, 61121, 61129, 61141, 61151, 61153, 61169, - 61211, 61223, 61231, 61253, 61261, 61283, 61291, 61297, 61331, 61333, - 61339, 61343, 61357, 61363, 61379, 61381, 61403, 61409, 61417, 61441, - 61463, 61469, 61471, 61483, 61487, 61493, 61507, 61511, 61519, 61543, - 61547, 61553, 61559, 61561, 61583, 61603, 61609, 61613, 61627, 61631, - 61637, 61643, 61651, 61657, 61667, 61673, 61681, 61687, 61703, 61717, - 61723, 61729, 61751, 61757, 61781, 61813, 61819, 61837, 61843, 61861, - 61871, 61879, 61909, 61927, 61933, 61949, 61961, 61967, 61979, 61981, - 61987, 61991, 62003, 62011, 62017, 62039, 62047, 62053, 62057, 62071, - 62081, 62099, 62119, 62129, 62131, 62137, 62141, 62143, 62171, 62189, - 62191, 62201, 62207, 62213, 62219, 62233, 62273, 62297, 62299, 62303, - 62311, 62323, 62327, 62347, 62351, 62383, 62401, 62417, 62423, 62459, - 62467, 62473, 62477, 62483, 62497, 62501, 62507, 62533, 62539, 62549, - 62563, 62581, 62591, 62597, 62603, 62617, 62627, 62633, 62639, 62653, - 62659, 62683, 62687, 62701, 62723, 62731, 62743, 62753, 62761, 62773, - 62791, 62801, 62819, 62827, 62851, 62861, 62869, 62873, 62897, 62903, - 62921, 62927, 62929, 62939, 62969, 62971, 62981, 62983, 62987, 62989, - 63029, 63031, 63059, 63067, 63073, 63079, 63097, 63103, 63113, 63127, - 63131, 63149, 63179, 63197, 63199, 63211, 63241, 63247, 63277, 63281, - 63299, 63311, 63313, 63317, 63331, 63337, 63347, 63353, 63361, 63367, - 63377, 63389, 63391, 63397, 63409, 63419, 63421, 63439, 63443, 63463, - 63467, 63473, 63487, 63493, 63499, 63521, 63527, 63533, 63541, 63559, - 63577, 63587, 63589, 63599, 63601, 63607, 63611, 63617, 63629, 63647, - 63649, 63659, 63667, 63671, 63689, 63691, 63697, 63703, 63709, 63719, - 63727, 63737, 63743, 63761, 63773, 63781, 63793, 63799, 63803, 63809, - 63823, 63839, 63841, 63853, 63857, 63863, 63901, 63907, 63913, 63929, - 63949, 63977, 63997, 64007, 64013, 64019, 64033, 64037, 64063, 64067, - 64081, 64091, 64109, 64123, 64151, 64153, 64157, 64171, 64187, 64189, - 64217, 64223, 64231, 64237, 64271, 64279, 64283, 64301, 64303, 64319, - 64327, 64333, 64373, 64381, 64399, 64403, 64433, 64439, 64451, 64453, - 64483, 64489, 64499, 64513, 64553, 64567, 64577, 64579, 64591, 64601, - 64609, 64613, 64621, 64627, 64633, 64661, 64663, 64667, 64679, 64693, - 
64709, 64717, 64747, 64763, 64781, 64783, 64793, 64811, 64817, 64849, - 64853, 64871, 64877, 64879, 64891, 64901, 64919, 64921, 64927, 64937, - 64951, 64969, 64997, 65003, 65011, 65027, 65029, 65033, 65053, 65063, - 65071, 65089, 65099, 65101, 65111, 65119, 65123, 65129, 65141, 65147, - 65167, 65171, 65173, 65179, 65183, 65203, 65213, 65239, 65257, 65267, - 65269, 65287, 65293, 65309, 65323, 65327, 65353, 65357, 65371, 65381, - 65393, 65407, 65413, 65419, 65423, 65437, 65447, 65449, 65479, 65497, - 65519, 65521, 65537, 65539, 65543, 65551, 65557, 65563, 65579, 65581, - 65587, 65599, 65609, 65617, 65629, 65633, 65647, 65651, 65657, 65677, - 65687, 65699, 65701, 65707, 65713, 65717, 65719, 65729, 65731, 65761, - 65777, 65789, 65809, 65827, 65831, 65837, 65839, 65843, 65851, 65867, - 65881, 65899, 65921, 65927, 65929, 65951, 65957, 65963, 65981, 65983, - 65993, 66029, 66037, 66041, 66047, 66067, 66071, 66083, 66089, 66103, - 66107, 66109, 66137, 66161, 66169, 66173, 66179, 66191, 66221, 66239, - 66271, 66293, 66301, 66337, 66343, 66347, 66359, 66361, 66373, 66377, - 66383, 66403, 66413, 66431, 66449, 66457, 66463, 66467, 66491, 66499, - 66509, 66523, 66529, 66533, 66541, 66553, 66569, 66571, 66587, 66593, - 66601, 66617, 66629, 66643, 66653, 66683, 66697, 66701, 66713, 66721, - 66733, 66739, 66749, 66751, 66763, 66791, 66797, 66809, 66821, 66841, - 66851, 66853, 66863, 66877, 66883, 66889, 66919, 66923, 66931, 66943, - 66947, 66949, 66959, 66973, 66977, 67003, 67021, 67033, 67043, 67049, - 67057, 67061, 67073, 67079, 67103, 67121, 67129, 67139, 67141, 67153, - 67157, 67169, 67181, 67187, 67189, 67211, 67213, 67217, 67219, 67231, - 67247, 67261, 67271, 67273, 67289, 67307, 67339, 67343, 67349, 67369, - 67391, 67399, 67409, 67411, 67421, 67427, 67429, 67433, 67447, 67453, - 67477, 67481, 67489, 67493, 67499, 67511, 67523, 67531, 67537, 67547, - 67559, 67567, 67577, 67579, 67589, 67601, 67607, 67619, 67631, 67651, - 67679, 67699, 67709, 67723, 67733, 67741, 67751, 67757, 67759, 67763, - 67777, 67783, 67789, 67801, 67807, 67819, 67829, 67843, 67853, 67867, - 67883, 67891, 67901, 67927, 67931, 67933, 67939, 67943, 67957, 67961, - 67967, 67979, 67987, 67993, 68023, 68041, 68053, 68059, 68071, 68087, - 68099, 68111, 68113, 68141, 68147, 68161, 68171, 68207, 68209, 68213, - 68219, 68227, 68239, 68261, 68279, 68281, 68311, 68329, 68351, 68371, - 68389, 68399, 68437, 68443, 68447, 68449, 68473, 68477, 68483, 68489, - 68491, 68501, 68507, 68521, 68531, 68539, 68543, 68567, 68581, 68597, - 68611, 68633, 68639, 68659, 68669, 68683, 68687, 68699, 68711, 68713, - 68729, 68737, 68743, 68749, 68767, 68771, 68777, 68791, 68813, 68819, - 68821, 68863, 68879, 68881, 68891, 68897, 68899, 68903, 68909, 68917, - 68927, 68947, 68963, 68993, 69001, 69011, 69019, 69029, 69031, 69061, - 69067, 69073, 69109, 69119, 69127, 69143, 69149, 69151, 69163, 69191, - 69193, 69197, 69203, 69221, 69233, 69239, 69247, 69257, 69259, 69263, - 69313, 69317, 69337, 69341, 69371, 69379, 69383, 69389, 69401, 69403, - 69427, 69431, 69439, 69457, 69463, 69467, 69473, 69481, 69491, 69493, - 69497, 69499, 69539, 69557, 69593, 69623, 69653, 69661, 69677, 69691, - 69697, 69709, 69737, 69739, 69761, 69763, 69767, 69779, 69809, 69821, - 69827, 69829, 69833, 69847, 69857, 69859, 69877, 69899, 69911, 69929, - 69931, 69941, 69959, 69991, 69997, 70001, 70003, 70009, 70019, 70039, - 70051, 70061, 70067, 70079, 70099, 70111, 70117, 70121, 70123, 70139, - 70141, 70157, 70163, 70177, 70181, 70183, 70199, 70201, 70207, 70223, - 70229, 70237, 70241, 
70249, 70271, 70289, 70297, 70309, 70313, 70321, - 70327, 70351, 70373, 70379, 70381, 70393, 70423, 70429, 70439, 70451, - 70457, 70459, 70481, 70487, 70489, 70501, 70507, 70529, 70537, 70549, - 70571, 70573, 70583, 70589, 70607, 70619, 70621, 70627, 70639, 70657, - 70663, 70667, 70687, 70709, 70717, 70729, 70753, 70769, 70783, 70793, - 70823, 70841, 70843, 70849, 70853, 70867, 70877, 70879, 70891, 70901, - 70913, 70919, 70921, 70937, 70949, 70951, 70957, 70969, 70979, 70981, - 70991, 70997, 70999, 71011, 71023, 71039, 71059, 71069, 71081, 71089, - 71119, 71129, 71143, 71147, 71153, 71161, 71167, 71171, 71191, 71209, - 71233, 71237, 71249, 71257, 71261, 71263, 71287, 71293, 71317, 71327, - 71329, 71333, 71339, 71341, 71347, 71353, 71359, 71363, 71387, 71389, - 71399, 71411, 71413, 71419, 71429, 71437, 71443, 71453, 71471, 71473, - 71479, 71483, 71503, 71527, 71537, 71549, 71551, 71563, 71569, 71593, - 71597, 71633, 71647, 71663, 71671, 71693, 71699, 71707, 71711, 71713, - 71719, 71741, 71761, 71777, 71789, 71807, 71809, 71821, 71837, 71843, - 71849, 71861, 71867, 71879, 71881, 71887, 71899, 71909, 71917, 71933, - 71941, 71947, 71963, 71971, 71983, 71987, 71993, 71999, 72019, 72031, - 72043, 72047, 72053, 72073, 72077, 72089, 72091, 72101, 72103, 72109, - 72139, 72161, 72167, 72169, 72173, 72211, 72221, 72223, 72227, 72229, - 72251, 72253, 72269, 72271, 72277, 72287, 72307, 72313, 72337, 72341, - 72353, 72367, 72379, 72383, 72421, 72431, 72461, 72467, 72469, 72481, - 72493, 72497, 72503, 72533, 72547, 72551, 72559, 72577, 72613, 72617, - 72623, 72643, 72647, 72649, 72661, 72671, 72673, 72679, 72689, 72701, - 72707, 72719, 72727, 72733, 72739, 72763, 72767, 72797, 72817, 72823, - 72859, 72869, 72871, 72883, 72889, 72893, 72901, 72907, 72911, 72923, - 72931, 72937, 72949, 72953, 72959, 72973, 72977, 72997, 73009, 73013, - 73019, 73037, 73039, 73043, 73061, 73063, 73079, 73091, 73121, 73127, - 73133, 73141, 73181, 73189, 73237, 73243, 73259, 73277, 73291, 73303, - 73309, 73327, 73331, 73351, 73361, 73363, 73369, 73379, 73387, 73417, - 73421, 73433, 73453, 73459, 73471, 73477, 73483, 73517, 73523, 73529, - 73547, 73553, 73561, 73571, 73583, 73589, 73597, 73607, 73609, 73613, - 73637, 73643, 73651, 73673, 73679, 73681, 73693, 73699, 73709, 73721, - 73727, 73751, 73757, 73771, 73783, 73819, 73823, 73847, 73849, 73859, - 73867, 73877, 73883, 73897, 73907, 73939, 73943, 73951, 73961, 73973, - 73999, 74017, 74021, 74027, 74047, 74051, 74071, 74077, 74093, 74099, - 74101, 74131, 74143, 74149, 74159, 74161, 74167, 74177, 74189, 74197, - 74201, 74203, 74209, 74219, 74231, 74257, 74279, 74287, 74293, 74297, - 74311, 74317, 74323, 74353, 74357, 74363, 74377, 74381, 74383, 74411, - 74413, 74419, 74441, 74449, 74453, 74471, 74489, 74507, 74509, 74521, - 74527, 74531, 74551, 74561, 74567, 74573, 74587, 74597, 74609, 74611, - 74623, 74653, 74687, 74699, 74707, 74713, 74717, 74719, 74729, 74731, - 74747, 74759, 74761, 74771, 74779, 74797, 74821, 74827, 74831, 74843, - 74857, 74861, 74869, 74873, 74887, 74891, 74897, 74903, 74923, 74929, - 74933, 74941, 74959, 75011, 75013, 75017, 75029, 75037, 75041, 75079, - 75083, 75109, 75133, 75149, 75161, 75167, 75169, 75181, 75193, 75209, - 75211, 75217, 75223, 75227, 75239, 75253, 75269, 75277, 75289, 75307, - 75323, 75329, 75337, 75347, 75353, 75367, 75377, 75389, 75391, 75401, - 75403, 75407, 75431, 75437, 75479, 75503, 75511, 75521, 75527, 75533, - 75539, 75541, 75553, 75557, 75571, 75577, 75583, 75611, 75617, 75619, - 75629, 75641, 75653, 75659, 75679, 75683, 
75689, 75703, 75707, 75709, - 75721, 75731, 75743, 75767, 75773, 75781, 75787, 75793, 75797, 75821, - 75833, 75853, 75869, 75883, 75913, 75931, 75937, 75941, 75967, 75979, - 75983, 75989, 75991, 75997, 76001, 76003, 76031, 76039, 76079, 76081, - 76091, 76099, 76103, 76123, 76129, 76147, 76157, 76159, 76163, 76207, - 76213, 76231, 76243, 76249, 76253, 76259, 76261, 76283, 76289, 76303, - 76333, 76343, 76367, 76369, 76379, 76387, 76403, 76421, 76423, 76441, - 76463, 76471, 76481, 76487, 76493, 76507, 76511, 76519, 76537, 76541, - 76543, 76561, 76579, 76597, 76603, 76607, 76631, 76649, 76651, 76667, - 76673, 76679, 76697, 76717, 76733, 76753, 76757, 76771, 76777, 76781, - 76801, 76819, 76829, 76831, 76837, 76847, 76871, 76873, 76883, 76907, - 76913, 76919, 76943, 76949, 76961, 76963, 76991, 77003, 77017, 77023, - 77029, 77041, 77047, 77069, 77081, 77093, 77101, 77137, 77141, 77153, - 77167, 77171, 77191, 77201, 77213, 77237, 77239, 77243, 77249, 77261, - 77263, 77267, 77269, 77279, 77291, 77317, 77323, 77339, 77347, 77351, - 77359, 77369, 77377, 77383, 77417, 77419, 77431, 77447, 77471, 77477, - 77479, 77489, 77491, 77509, 77513, 77521, 77527, 77543, 77549, 77551, - 77557, 77563, 77569, 77573, 77587, 77591, 77611, 77617, 77621, 77641, - 77647, 77659, 77681, 77687, 77689, 77699, 77711, 77713, 77719, 77723, - 77731, 77743, 77747, 77761, 77773, 77783, 77797, 77801, 77813, 77839, - 77849, 77863, 77867, 77893, 77899, 77929, 77933, 77951, 77969, 77977, - 77983, 77999, 78007, 78017, 78031, 78041, 78049, 78059, 78079, 78101, - 78121, 78137, 78139, 78157, 78163, 78167, 78173, 78179, 78191, 78193, - 78203, 78229, 78233, 78241, 78259, 78277, 78283, 78301, 78307, 78311, - 78317, 78341, 78347, 78367, 78401, 78427, 78437, 78439, 78467, 78479, - 78487, 78497, 78509, 78511, 78517, 78539, 78541, 78553, 78569, 78571, - 78577, 78583, 78593, 78607, 78623, 78643, 78649, 78653, 78691, 78697, - 78707, 78713, 78721, 78737, 78779, 78781, 78787, 78791, 78797, 78803, - 78809, 78823, 78839, 78853, 78857, 78877, 78887, 78889, 78893, 78901, - 78919, 78929, 78941, 78977, 78979, 78989, 79031, 79039, 79043, 79063, - 79087, 79103, 79111, 79133, 79139, 79147, 79151, 79153, 79159, 79181, - 79187, 79193, 79201, 79229, 79231, 79241, 79259, 79273, 79279, 79283, - 79301, 79309, 79319, 79333, 79337, 79349, 79357, 79367, 79379, 79393, - 79397, 79399, 79411, 79423, 79427, 79433, 79451, 79481, 79493, 79531, - 79537, 79549, 79559, 79561, 79579, 79589, 79601, 79609, 79613, 79621, - 79627, 79631, 79633, 79657, 79669, 79687, 79691, 79693, 79697, 79699, - 79757, 79769, 79777, 79801, 79811, 79813, 79817, 79823, 79829, 79841, - 79843, 79847, 79861, 79867, 79873, 79889, 79901, 79903, 79907, 79939, - 79943, 79967, 79973, 79979, 79987, 79997, 79999, 80021, 80039, 80051, - 80071, 80077, 80107, 80111, 80141, 80147, 80149, 80153, 80167, 80173, - 80177, 80191, 80207, 80209, 80221, 80231, 80233, 80239, 80251, 80263, - 80273, 80279, 80287, 80309, 80317, 80329, 80341, 80347, 80363, 80369, - 80387, 80407, 80429, 80447, 80449, 80471, 80473, 80489, 80491, 80513, - 80527, 80537, 80557, 80567, 80599, 80603, 80611, 80621, 80627, 80629, - 80651, 80657, 80669, 80671, 80677, 80681, 80683, 80687, 80701, 80713, - 80737, 80747, 80749, 80761, 80777, 80779, 80783, 80789, 80803, 80809, - 80819, 80831, 80833, 80849, 80863, 80897, 80909, 80911, 80917, 80923, - 80929, 80933, 80953, 80963, 80989, 81001, 81013, 81017, 81019, 81023, - 81031, 81041, 81043, 81047, 81049, 81071, 81077, 81083, 81097, 81101, - 81119, 81131, 81157, 81163, 81173, 81181, 81197, 81199, 81203, 
81223, - 81233, 81239, 81281, 81283, 81293, 81299, 81307, 81331, 81343, 81349, - 81353, 81359, 81371, 81373, 81401, 81409, 81421, 81439, 81457, 81463, - 81509, 81517, 81527, 81533, 81547, 81551, 81553, 81559, 81563, 81569, - 81611, 81619, 81629, 81637, 81647, 81649, 81667, 81671, 81677, 81689, - 81701, 81703, 81707, 81727, 81737, 81749, 81761, 81769, 81773, 81799, - 81817, 81839, 81847, 81853, 81869, 81883, 81899, 81901, 81919, 81929, - 81931, 81937, 81943, 81953, 81967, 81971, 81973, 82003, 82007, 82009, - 82013, 82021, 82031, 82037, 82039, 82051, 82067, 82073, 82129, 82139, - 82141, 82153, 82163, 82171, 82183, 82189, 82193, 82207, 82217, 82219, - 82223, 82231, 82237, 82241, 82261, 82267, 82279, 82301, 82307, 82339, - 82349, 82351, 82361, 82373, 82387, 82393, 82421, 82457, 82463, 82469, - 82471, 82483, 82487, 82493, 82499, 82507, 82529, 82531, 82549, 82559, - 82561, 82567, 82571, 82591, 82601, 82609, 82613, 82619, 82633, 82651, - 82657, 82699, 82721, 82723, 82727, 82729, 82757, 82759, 82763, 82781, - 82787, 82793, 82799, 82811, 82813, 82837, 82847, 82883, 82889, 82891, - 82903, 82913, 82939, 82963, 82981, 82997, 83003, 83009, 83023, 83047, - 83059, 83063, 83071, 83077, 83089, 83093, 83101, 83117, 83137, 83177, - 83203, 83207, 83219, 83221, 83227, 83231, 83233, 83243, 83257, 83267, - 83269, 83273, 83299, 83311, 83339, 83341, 83357, 83383, 83389, 83399, - 83401, 83407, 83417, 83423, 83431, 83437, 83443, 83449, 83459, 83471, - 83477, 83497, 83537, 83557, 83561, 83563, 83579, 83591, 83597, 83609, - 83617, 83621, 83639, 83641, 83653, 83663, 83689, 83701, 83717, 83719, - 83737, 83761, 83773, 83777, 83791, 83813, 83833, 83843, 83857, 83869, - 83873, 83891, 83903, 83911, 83921, 83933, 83939, 83969, 83983, 83987, - 84011, 84017, 84047, 84053, 84059, 84061, 84067, 84089, 84121, 84127, - 84131, 84137, 84143, 84163, 84179, 84181, 84191, 84199, 84211, 84221, - 84223, 84229, 84239, 84247, 84263, 84299, 84307, 84313, 84317, 84319, - 84347, 84349, 84377, 84389, 84391, 84401, 84407, 84421, 84431, 84437, - 84443, 84449, 84457, 84463, 84467, 84481, 84499, 84503, 84509, 84521, - 84523, 84533, 84551, 84559, 84589, 84629, 84631, 84649, 84653, 84659, - 84673, 84691, 84697, 84701, 84713, 84719, 84731, 84737, 84751, 84761, - 84787, 84793, 84809, 84811, 84827, 84857, 84859, 84869, 84871, 84913, - 84919, 84947, 84961, 84967, 84977, 84979, 84991, 85009, 85021, 85027, - 85037, 85049, 85061, 85081, 85087, 85091, 85093, 85103, 85109, 85121, - 85133, 85147, 85159, 85193, 85199, 85201, 85213, 85223, 85229, 85237, - 85243, 85247, 85259, 85297, 85303, 85313, 85331, 85333, 85361, 85363, - 85369, 85381, 85411, 85427, 85429, 85439, 85447, 85451, 85453, 85469, - 85487, 85513, 85517, 85523, 85531, 85549, 85571, 85577, 85597, 85601, - 85607, 85619, 85621, 85627, 85639, 85643, 85661, 85667, 85669, 85691, - 85703, 85711, 85717, 85733, 85751, 85781, 85793, 85817, 85819, 85829, - 85831, 85837, 85843, 85847, 85853, 85889, 85903, 85909, 85931, 85933, - 85991, 85999, 86011, 86017, 86027, 86029, 86069, 86077, 86083, 86111, - 86113, 86117, 86131, 86137, 86143, 86161, 86171, 86179, 86183, 86197, - 86201, 86209, 86239, 86243, 86249, 86257, 86263, 86269, 86287, 86291, - 86293, 86297, 86311, 86323, 86341, 86351, 86353, 86357, 86369, 86371, - 86381, 86389, 86399, 86413, 86423, 86441, 86453, 86461, 86467, 86477, - 86491, 86501, 86509, 86531, 86533, 86539, 86561, 86573, 86579, 86587, - 86599, 86627, 86629, 86677, 86689, 86693, 86711, 86719, 86729, 86743, - 86753, 86767, 86771, 86783, 86813, 86837, 86843, 86851, 86857, 86861, - 86869, 86923, 
86927, 86929, 86939, 86951, 86959, 86969, 86981, 86993, - 87011, 87013, 87037, 87041, 87049, 87071, 87083, 87103, 87107, 87119, - 87121, 87133, 87149, 87151, 87179, 87181, 87187, 87211, 87221, 87223, - 87251, 87253, 87257, 87277, 87281, 87293, 87299, 87313, 87317, 87323, - 87337, 87359, 87383, 87403, 87407, 87421, 87427, 87433, 87443, 87473, - 87481, 87491, 87509, 87511, 87517, 87523, 87539, 87541, 87547, 87553, - 87557, 87559, 87583, 87587, 87589, 87613, 87623, 87629, 87631, 87641, - 87643, 87649, 87671, 87679, 87683, 87691, 87697, 87701, 87719, 87721, - 87739, 87743, 87751, 87767, 87793, 87797, 87803, 87811, 87833, 87853, - 87869, 87877, 87881, 87887, 87911, 87917, 87931, 87943, 87959, 87961, - 87973, 87977, 87991, 88001, 88003, 88007, 88019, 88037, 88069, 88079, - 88093, 88117, 88129, 88169, 88177, 88211, 88223, 88237, 88241, 88259, - 88261, 88289, 88301, 88321, 88327, 88337, 88339, 88379, 88397, 88411, - 88423, 88427, 88463, 88469, 88471, 88493, 88499, 88513, 88523, 88547, - 88589, 88591, 88607, 88609, 88643, 88651, 88657, 88661, 88663, 88667, - 88681, 88721, 88729, 88741, 88747, 88771, 88789, 88793, 88799, 88801, - 88807, 88811, 88813, 88817, 88819, 88843, 88853, 88861, 88867, 88873, - 88883, 88897, 88903, 88919, 88937, 88951, 88969, 88993, 88997, 89003, - 89009, 89017, 89021, 89041, 89051, 89057, 89069, 89071, 89083, 89087, - 89101, 89107, 89113, 89119, 89123, 89137, 89153, 89189, 89203, 89209, - 89213, 89227, 89231, 89237, 89261, 89269, 89273, 89293, 89303, 89317, - 89329, 89363, 89371, 89381, 89387, 89393, 89399, 89413, 89417, 89431, - 89443, 89449, 89459, 89477, 89491, 89501, 89513, 89519, 89521, 89527, - 89533, 89561, 89563, 89567, 89591, 89597, 89599, 89603, 89611, 89627, - 89633, 89653, 89657, 89659, 89669, 89671, 89681, 89689, 89753, 89759, - 89767, 89779, 89783, 89797, 89809, 89819, 89821, 89833, 89839, 89849, - 89867, 89891, 89897, 89899, 89909, 89917, 89923, 89939, 89959, 89963, - 89977, 89983, 89989, 90001, 90007, 90011, 90017, 90019, 90023, 90031, - 90053, 90059, 90067, 90071, 90073, 90089, 90107, 90121, 90127, 90149, - 90163, 90173, 90187, 90191, 90197, 90199, 90203, 90217, 90227, 90239, - 90247, 90263, 90271, 90281, 90289, 90313, 90353, 90359, 90371, 90373, - 90379, 90397, 90401, 90403, 90407, 90437, 90439, 90469, 90473, 90481, - 90499, 90511, 90523, 90527, 90529, 90533, 90547, 90583, 90599, 90617, - 90619, 90631, 90641, 90647, 90659, 90677, 90679, 90697, 90703, 90709, - 90731, 90749, 90787, 90793, 90803, 90821, 90823, 90833, 90841, 90847, - 90863, 90887, 90901, 90907, 90911, 90917, 90931, 90947, 90971, 90977, - 90989, 90997, 91009, 91019, 91033, 91079, 91081, 91097, 91099, 91121, - 91127, 91129, 91139, 91141, 91151, 91153, 91159, 91163, 91183, 91193, - 91199, 91229, 91237, 91243, 91249, 91253, 91283, 91291, 91297, 91303, - 91309, 91331, 91367, 91369, 91373, 91381, 91387, 91393, 91397, 91411, - 91423, 91433, 91453, 91457, 91459, 91463, 91493, 91499, 91513, 91529, - 91541, 91571, 91573, 91577, 91583, 91591, 91621, 91631, 91639, 91673, - 91691, 91703, 91711, 91733, 91753, 91757, 91771, 91781, 91801, 91807, - 91811, 91813, 91823, 91837, 91841, 91867, 91873, 91909, 91921, 91939, - 91943, 91951, 91957, 91961, 91967, 91969, 91997, 92003, 92009, 92033, - 92041, 92051, 92077, 92083, 92107, 92111, 92119, 92143, 92153, 92173, - 92177, 92179, 92189, 92203, 92219, 92221, 92227, 92233, 92237, 92243, - 92251, 92269, 92297, 92311, 92317, 92333, 92347, 92353, 92357, 92363, - 92369, 92377, 92381, 92383, 92387, 92399, 92401, 92413, 92419, 92431, - 92459, 92461, 92467, 92479, 92489, 
92503, 92507, 92551, 92557, 92567, - 92569, 92581, 92593, 92623, 92627, 92639, 92641, 92647, 92657, 92669, - 92671, 92681, 92683, 92693, 92699, 92707, 92717, 92723, 92737, 92753, - 92761, 92767, 92779, 92789, 92791, 92801, 92809, 92821, 92831, 92849, - 92857, 92861, 92863, 92867, 92893, 92899, 92921, 92927, 92941, 92951, - 92957, 92959, 92987, 92993, 93001, 93047, 93053, 93059, 93077, 93083, - 93089, 93097, 93103, 93113, 93131, 93133, 93139, 93151, 93169, 93179, - 93187, 93199, 93229, 93239, 93241, 93251, 93253, 93257, 93263, 93281, - 93283, 93287, 93307, 93319, 93323, 93329, 93337, 93371, 93377, 93383, - 93407, 93419, 93427, 93463, 93479, 93481, 93487, 93491, 93493, 93497, - 93503, 93523, 93529, 93553, 93557, 93559, 93563, 93581, 93601, 93607, - 93629, 93637, 93683, 93701, 93703, 93719, 93739, 93761, 93763, 93787, - 93809, 93811, 93827, 93851, 93871, 93887, 93889, 93893, 93901, 93911, - 93913, 93923, 93937, 93941, 93949, 93967, 93971, 93979, 93983, 93997, - 94007, 94009, 94033, 94049, 94057, 94063, 94079, 94099, 94109, 94111, - 94117, 94121, 94151, 94153, 94169, 94201, 94207, 94219, 94229, 94253, - 94261, 94273, 94291, 94307, 94309, 94321, 94327, 94331, 94343, 94349, - 94351, 94379, 94397, 94399, 94421, 94427, 94433, 94439, 94441, 94447, - 94463, 94477, 94483, 94513, 94529, 94531, 94541, 94543, 94547, 94559, - 94561, 94573, 94583, 94597, 94603, 94613, 94621, 94649, 94651, 94687, - 94693, 94709, 94723, 94727, 94747, 94771, 94777, 94781, 94789, 94793, - 94811, 94819, 94823, 94837, 94841, 94847, 94849, 94873, 94889, 94903, - 94907, 94933, 94949, 94951, 94961, 94993, 94999, 95003, 95009, 95021, - 95027, 95063, 95071, 95083, 95087, 95089, 95093, 95101, 95107, 95111, - 95131, 95143, 95153, 95177, 95189, 95191, 95203, 95213, 95219, 95231, - 95233, 95239, 95257, 95261, 95267, 95273, 95279, 95287, 95311, 95317, - 95327, 95339, 95369, 95383, 95393, 95401, 95413, 95419, 95429, 95441, - 95443, 95461, 95467, 95471, 95479, 95483, 95507, 95527, 95531, 95539, - 95549, 95561, 95569, 95581, 95597, 95603, 95617, 95621, 95629, 95633, - 95651, 95701, 95707, 95713, 95717, 95723, 95731, 95737, 95747, 95773, - 95783, 95789, 95791, 95801, 95803, 95813, 95819, 95857, 95869, 95873, - 95881, 95891, 95911, 95917, 95923, 95929, 95947, 95957, 95959, 95971, - 95987, 95989, 96001, 96013, 96017, 96043, 96053, 96059, 96079, 96097, - 96137, 96149, 96157, 96167, 96179, 96181, 96199, 96211, 96221, 96223, - 96233, 96259, 96263, 96269, 96281, 96289, 96293, 96323, 96329, 96331, - 96337, 96353, 96377, 96401, 96419, 96431, 96443, 96451, 96457, 96461, - 96469, 96479, 96487, 96493, 96497, 96517, 96527, 96553, 96557, 96581, - 96587, 96589, 96601, 96643, 96661, 96667, 96671, 96697, 96703, 96731, - 96737, 96739, 96749, 96757, 96763, 96769, 96779, 96787, 96797, 96799, - 96821, 96823, 96827, 96847, 96851, 96857, 96893, 96907, 96911, 96931, - 96953, 96959, 96973, 96979, 96989, 96997, 97001, 97003, 97007, 97021, - 97039, 97073, 97081, 97103, 97117, 97127, 97151, 97157, 97159, 97169, - 97171, 97177, 97187, 97213, 97231, 97241, 97259, 97283, 97301, 97303, - 97327, 97367, 97369, 97373, 97379, 97381, 97387, 97397, 97423, 97429, - 97441, 97453, 97459, 97463, 97499, 97501, 97511, 97523, 97547, 97549, - 97553, 97561, 97571, 97577, 97579, 97583, 97607, 97609, 97613, 97649, - 97651, 97673, 97687, 97711, 97729, 97771, 97777, 97787, 97789, 97813, - 97829, 97841, 97843, 97847, 97849, 97859, 97861, 97871, 97879, 97883, - 97919, 97927, 97931, 97943, 97961, 97967, 97973, 97987, 98009, 98011, - 98017, 98041, 98047, 98057, 98081, 98101, 98123, 98129, 
98143, 98179, - 98207, 98213, 98221, 98227, 98251, 98257, 98269, 98297, 98299, 98317, - 98321, 98323, 98327, 98347, 98369, 98377, 98387, 98389, 98407, 98411, - 98419, 98429, 98443, 98453, 98459, 98467, 98473, 98479, 98491, 98507, - 98519, 98533, 98543, 98561, 98563, 98573, 98597, 98621, 98627, 98639, - 98641, 98663, 98669, 98689, 98711, 98713, 98717, 98729, 98731, 98737, - 98773, 98779, 98801, 98807, 98809, 98837, 98849, 98867, 98869, 98873, - 98887, 98893, 98897, 98899, 98909, 98911, 98927, 98929, 98939, 98947, - 98953, 98963, 98981, 98993, 98999, 99013, 99017, 99023, 99041, 99053, - 99079, 99083, 99089, 99103, 99109, 99119, 99131, 99133, 99137, 99139, - 99149, 99173, 99181, 99191, 99223, 99233, 99241, 99251, 99257, 99259, - 99277, 99289, 99317, 99347, 99349, 99367, 99371, 99377, 99391, 99397, - 99401, 99409, 99431, 99439, 99469, 99487, 99497, 99523, 99527, 99529, - 99551, 99559, 99563, 99571, 99577, 99581, 99607, 99611, 99623, 99643, - 99661, 99667, 99679, 99689, 99707, 99709, 99713, 99719, 99721, 99733, - 99761, 99767, 99787, 99793, 99809, 99817, 99823, 99829, 99833, 99839, - 99859, 99871, 99877, 99881, 99901, 99907, 99923, 99929, 99961, 99971, - 99989, 99991, 100003, 100019, 100043, 100049, 100057, 100069, 100103, 100109, -100129, 100151, 100153, 100169, 100183, 100189, 100193, 100207, 100213, 100237, -100267, 100271, 100279, 100291, 100297, 100313, 100333, 100343, 100357, 100361, -100363, 100379, 100391, 100393, 100403, 100411, 100417, 100447, 100459, 100469, -100483, 100493, 100501, 100511, 100517, 100519, 100523, 100537, 100547, 100549, -100559, 100591, 100609, 100613, 100621, 100649, 100669, 100673, 100693, 100699, -100703, 100733, 100741, 100747, 100769, 100787, 100799, 100801, 100811, 100823, -100829, 100847, 100853, 100907, 100913, 100927, 100931, 100937, 100943, 100957, -100981, 100987, 100999, 101009, 101021, 101027, 101051, 101063, 101081, 101089, -101107, 101111, 101113, 101117, 101119, 101141, 101149, 101159, 101161, 101173, -101183, 101197, 101203, 101207, 101209, 101221, 101267, 101273, 101279, 101281, -101287, 101293, 101323, 101333, 101341, 101347, 101359, 101363, 101377, 101383, -101399, 101411, 101419, 101429, 101449, 101467, 101477, 101483, 101489, 101501, -101503, 101513, 101527, 101531, 101533, 101537, 101561, 101573, 101581, 101599, -101603, 101611, 101627, 101641, 101653, 101663, 101681, 101693, 101701, 101719, -101723, 101737, 101741, 101747, 101749, 101771, 101789, 101797, 101807, 101833, -101837, 101839, 101863, 101869, 101873, 101879, 101891, 101917, 101921, 101929, -101939, 101957, 101963, 101977, 101987, 101999, 102001, 102013, 102019, 102023, -102031, 102043, 102059, 102061, 102071, 102077, 102079, 102101, 102103, 102107, -102121, 102139, 102149, 102161, 102181, 102191, 102197, 102199, 102203, 102217, -102229, 102233, 102241, 102251, 102253, 102259, 102293, 102299, 102301, 102317, -102329, 102337, 102359, 102367, 102397, 102407, 102409, 102433, 102437, 102451, -102461, 102481, 102497, 102499, 102503, 102523, 102533, 102539, 102547, 102551, -102559, 102563, 102587, 102593, 102607, 102611, 102643, 102647, 102653, 102667, -102673, 102677, 102679, 102701, 102761, 102763, 102769, 102793, 102797, 102811, -102829, 102841, 102859, 102871, 102877, 102881, 102911, 102913, 102929, 102931, -102953, 102967, 102983, 103001, 103007, 103043, 103049, 103067, 103069, 103079, -103087, 103091, 103093, 103099, 103123, 103141, 103171, 103177, 103183, 103217, -103231, 103237, 103289, 103291, 103307, 103319, 103333, 103349, 103357, 103387, -103391, 103393, 103399, 103409, 103421, 
103423, 103451, 103457, 103471, 103483, -103511, 103529, 103549, 103553, 103561, 103567, 103573, 103577, 103583, 103591, -103613, 103619, 103643, 103651, 103657, 103669, 103681, 103687, 103699, 103703, -103723, 103769, 103787, 103801, 103811, 103813, 103837, 103841, 103843, 103867, -103889, 103903, 103913, 103919, 103951, 103963, 103967, 103969, 103979, 103981, -103991, 103993, 103997, 104003, 104009, 104021, 104033, 104047, 104053, 104059, -104087, 104089, 104107, 104113, 104119, 104123, 104147, 104149, 104161, 104173, -104179, 104183, 104207, 104231, 104233, 104239, 104243, 104281, 104287, 104297, -104309, 104311, 104323, 104327, 104347, 104369, 104381, 104383, 104393, 104399, -104417, 104459, 104471, 104473, 104479, 104491, 104513, 104527, 104537, 104543, -104549, 104551, 104561, 104579, 104593, 104597, 104623, 104639, 104651, 104659, -104677, 104681, 104683, 104693, 104701, 104707, 104711, 104717, 104723, 104729, -) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/py3compat.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/py3compat.py deleted file mode 100644 index f8367c9..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/py3compat.py +++ /dev/null @@ -1,107 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Util/py3compat.py : Compatibility code for handling Py3k / Python 2.x -# -# Written in 2010 by Thorsten Behrens -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Compatibility code for handling string/bytes changes from Python 2.x to Py3k - -In Python 2.x, strings (of type ''str'') contain binary data, including encoded -Unicode text (e.g. UTF-8). The separate type ''unicode'' holds Unicode text. -Unicode literals are specified via the u'...' prefix. Indexing or slicing -either type always produces a string of the same type as the original. -Data read from a file is always of '''str'' type. - -In Python 3.x, strings (type ''str'') may only contain Unicode text. The u'...' -prefix and the ''unicode'' type are now redundant. A new type (called -''bytes'') has to be used for binary data (including any particular -''encoding'' of a string). The b'...' prefix allows one to specify a binary -literal. Indexing or slicing a string produces another string. Slicing a byte -string produces another byte string, but the indexing operation produces an -integer. Data read from a file is of '''str'' type if the file was opened in -text mode, or of ''bytes'' type otherwise. 
- -Since PyCrypto aims at supporting both Python 2.x and 3.x, the following helper -functions are used to keep the rest of the library as independent as possible -from the actual Python version. - -In general, the code should always deal with binary strings, and use integers -instead of 1-byte character strings. - -b(s) - Take a text string literal (with no prefix or with u'...' prefix) and - make a byte string. -bchr(c) - Take an integer and make a 1-character byte string. -bord(c) - Take the result of indexing on a byte string and make an integer. -tobytes(s) - Take a text string, a byte string, or a sequence of character taken from - a byte string and make a byte string. -""" - -__revision__ = "$Id$" - -import sys - -if sys.version_info[0] == 2: - def b(s): - return s - def bchr(s): - return chr(s) - def bstr(s): - return str(s) - def bord(s): - return ord(s) - if sys.version_info[1] == 1: - def tobytes(s): - try: - return s.encode('latin-1') - except: - return ''.join(s) - else: - def tobytes(s): - if isinstance(s, str): - return s.encode("latin-1") - else: - return ''.join(s) -else: - def b(s): - return s.encode("latin-1") # utf-8 would cause some side-effects we don't want - def bchr(s): - return bytes([s]) - def bstr(s): - if isinstance(s,str): - return bytes(s,"latin-1") - else: - return bytes(s) - def bord(s): - return s - def tobytes(s): - if isinstance(s,bytes): - return s - else: - if isinstance(s,str): - return s.encode("latin-1") - else: - return bytes(s) - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/randpool.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/randpool.py deleted file mode 100644 index 8b5a0b7..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/randpool.py +++ /dev/null @@ -1,82 +0,0 @@ -# -# randpool.py : Cryptographically strong random number generation -# -# Part of the Python Cryptography Toolkit -# -# Written by Andrew M. Kuchling, Mark Moraes, and others -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== -# - -__revision__ = "$Id$" - -from Crypto.pct_warnings import RandomPool_DeprecationWarning -import Crypto.Random -import warnings - -class RandomPool: - """Deprecated. Use Random.new() instead. - - See http://www.pycrypto.org/randpool-broken - """ - def __init__(self, numbytes = 160, cipher=None, hash=None, file=None): - warnings.warn("This application uses RandomPool, which is BROKEN in older releases. 
See http://www.pycrypto.org/randpool-broken", - RandomPool_DeprecationWarning) - self.__rng = Crypto.Random.new() - self.bytes = numbytes - self.bits = self.bytes * 8 - self.entropy = self.bits - - def get_bytes(self, N): - return self.__rng.read(N) - - def _updateEntropyEstimate(self, nbits): - self.entropy += nbits - if self.entropy < 0: - self.entropy = 0 - elif self.entropy > self.bits: - self.entropy = self.bits - - def _randomize(self, N=0, devname="/dev/urandom"): - """Dummy _randomize() function""" - self.__rng.flush() - - def randomize(self, N=0): - """Dummy randomize() function""" - self.__rng.flush() - - def stir(self, s=''): - """Dummy stir() function""" - self.__rng.flush() - - def stir_n(self, N=3): - """Dummy stir_n() function""" - self.__rng.flush() - - def add_event(self, s=''): - """Dummy add_event() function""" - self.__rng.flush() - - def getBytes(self, N): - """Dummy getBytes() function""" - return self.get_bytes(N) - - def addEvent(self, event, s=""): - """Dummy addEvent() function""" - return self.add_event() diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/strxor.cpython-37m-x86_64-linux-gnu.so b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/strxor.cpython-37m-x86_64-linux-gnu.so deleted file mode 100755 index 1973cd9..0000000 Binary files a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/strxor.cpython-37m-x86_64-linux-gnu.so and /dev/null differ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/winrandom.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/winrandom.py deleted file mode 100644 index 0242815..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/Util/winrandom.py +++ /dev/null @@ -1,28 +0,0 @@ -# -# Util/winrandom.py : Stub for Crypto.Random.OSRNG.winrandom -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -__revision__ = "$Id$" - -from Crypto.Random.OSRNG.winrandom import * - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/__init__.py deleted file mode 100644 index c27402e..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/__init__.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# -# =================================================================== -# The contents of this file are dedicated to the public domain. 
To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -"""Python Cryptography Toolkit - -A collection of cryptographic modules implementing various algorithms -and protocols. - -Subpackages: - -Crypto.Cipher - Secret-key (AES, DES, ARC4) and public-key encryption (RSA PKCS#1) algorithms -Crypto.Hash - Hashing algorithms (MD5, SHA, HMAC) -Crypto.Protocol - Cryptographic protocols (Chaffing, all-or-nothing transform, key derivation - functions). This package does not contain any network protocols. -Crypto.PublicKey - Public-key encryption and signature algorithms (RSA, DSA) -Crypto.Signature - Public-key signature algorithms (RSA PKCS#1) -Crypto.Util - Various useful modules and functions (long-to-string conversion, random number - generation, number theoretic functions) -""" - -__all__ = ['Cipher', 'Hash', 'Protocol', 'PublicKey', 'Util', 'Signature'] - -__version__ = '2.6.1' # See also below and setup.py -__revision__ = "$Id$" - -# New software should look at this instead of at __version__ above. -version_info = (2, 6, 1, 'final', 0) # See also above and setup.py - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/pct_warnings.py b/.tox/py37-normal/lib/python3.7/site-packages/Crypto/pct_warnings.py deleted file mode 100644 index 9b4361e..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/Crypto/pct_warnings.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: ascii -*- -# -# pct_warnings.py : PyCrypto warnings file -# -# Written in 2008 by Dwayne C. Litzenberger -# -# =================================================================== -# The contents of this file are dedicated to the public domain. To -# the extent that dedication to the public domain is not available, -# everyone is granted a worldwide, perpetual, royalty-free, -# non-exclusive license to exercise all rights associated with the -# contents of this file for any purpose whatsoever. -# No rights are reserved. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS -# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN -# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -# =================================================================== - -# -# Base classes. All our warnings inherit from one of these in order to allow -# the user to specifically filter them. 
-# - -class CryptoWarning(Warning): - """Base class for PyCrypto warnings""" - -class CryptoDeprecationWarning(DeprecationWarning, CryptoWarning): - """Base PyCrypto DeprecationWarning class""" - -class CryptoRuntimeWarning(RuntimeWarning, CryptoWarning): - """Base PyCrypto RuntimeWarning class""" - -# -# Warnings that we might actually use -# - -class RandomPool_DeprecationWarning(CryptoDeprecationWarning): - """Issued when Crypto.Util.randpool.RandomPool is instantiated.""" - -class ClockRewindWarning(CryptoRuntimeWarning): - """Warning for when the system clock moves backwards.""" - -class GetRandomNumber_DeprecationWarning(CryptoDeprecationWarning): - """Issued when Crypto.Util.number.getRandomNumber is invoked.""" - -class PowmInsecureWarning(CryptoRuntimeWarning): - """Warning for when _fastmath is built without mpz_powm_sec""" - -# By default, we want this warning to be shown every time we compensate for -# clock rewinding. -import warnings as _warnings -_warnings.filterwarnings('always', category=ClockRewindWarning, append=1) - -# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/atomicwrites-1.3.0.dist-info/INSTALLER b/.tox/py37-normal/lib/python3.7/site-packages/atomicwrites-1.3.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/atomicwrites-1.3.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/.tox/py37-normal/lib/python3.7/site-packages/atomicwrites-1.3.0.dist-info/METADATA b/.tox/py37-normal/lib/python3.7/site-packages/atomicwrites-1.3.0.dist-info/METADATA deleted file mode 100644 index a07d53d..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/atomicwrites-1.3.0.dist-info/METADATA +++ /dev/null @@ -1,144 +0,0 @@ -Metadata-Version: 2.1 -Name: atomicwrites -Version: 1.3.0 -Summary: Atomic file writes. -Home-page: https://github.com/untitaker/python-atomicwrites -Author: Markus Unterwaditzer -Author-email: markus@unterwaditzer.net -License: MIT -Platform: UNKNOWN -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: Implementation :: CPython -Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* - -=================== -python-atomicwrites -=================== - -.. image:: https://travis-ci.org/untitaker/python-atomicwrites.svg?branch=master - :target: https://travis-ci.org/untitaker/python-atomicwrites - -.. image:: https://ci.appveyor.com/api/projects/status/vadc4le3c27to59x/branch/master?svg=true - :target: https://ci.appveyor.com/project/untitaker/python-atomicwrites/branch/master - -Atomic file writes. - -.. code-block:: python - - from atomicwrites import atomic_write - - with atomic_write('foo.txt', overwrite=True) as f: - f.write('Hello world.') - # "foo.txt" doesn't exist yet. - - # Now it does. - - -Features that distinguish it from other similar libraries (see `Alternatives and Credit`_): - -- Race-free assertion that the target file doesn't yet exist. This can be - controlled with the ``overwrite`` parameter. - -- Windows support, although not well-tested. The MSDN resources are not very - explicit about which operations are atomic. 
I'm basing my assumptions off `a - comment - `_ - by `Doug Crook - `_, who appears - to be a Microsoft employee: - - FAQ: Is MoveFileEx atomic - Frequently asked question: Is MoveFileEx atomic if the existing and new - files are both on the same drive? - - The simple answer is "usually, but in some cases it will silently fall-back - to a non-atomic method, so don't count on it". - - The implementation of MoveFileEx looks something like this: [...] - - The problem is if the rename fails, you might end up with a CopyFile, which - is definitely not atomic. - - If you really need atomic-or-nothing, you can try calling - NtSetInformationFile, which is unsupported but is much more likely to be - atomic. - -- Simple high-level API that wraps a very flexible class-based API. - -- Consistent error handling across platforms. - - -How it works -============ - -It uses a temporary file in the same directory as the given path. This ensures -that the temporary file resides on the same filesystem. - -The temporary file will then be atomically moved to the target location: On -POSIX, it will use ``rename`` if files should be overwritten, otherwise a -combination of ``link`` and ``unlink``. On Windows, it uses MoveFileEx_ through -stdlib's ``ctypes`` with the appropriate flags. - -Note that with ``link`` and ``unlink``, there's a timewindow where the file -might be available under two entries in the filesystem: The name of the -temporary file, and the name of the target file. - -Also note that the permissions of the target file may change this way. In some -situations a ``chmod`` can be issued without any concurrency problems, but -since that is not always the case, this library doesn't do it by itself. - -.. _MoveFileEx: https://msdn.microsoft.com/en-us/library/windows/desktop/aa365240%28v=vs.85%29.aspx - -fsync ------ - -On POSIX, ``fsync`` is invoked on the temporary file after it is written (to -flush file content and metadata), and on the parent directory after the file is -moved (to flush filename). - -``fsync`` does not take care of disks' internal buffers, but there don't seem -to be any standard POSIX APIs for that. On OS X, ``fcntl`` is used with -``F_FULLFSYNC`` instead of ``fsync`` for that reason. - -On Windows, `_commit `_ -is used, but there are no guarantees about disk internal buffers. - -Alternatives and Credit -======================= - -Atomicwrites is directly inspired by the following libraries (and shares a -minimal amount of code): - -- The Trac project's `utility functions - `_, - also used in `Werkzeug `_ and - `mitsuhiko/python-atomicfile - `_. The idea to use - ``ctypes`` instead of ``PyWin32`` originated there. - -- `abarnert/fatomic `_. Windows support - (based on ``PyWin32``) was originally taken from there. - -Other alternatives to atomicwrites include: - -- `sashka/atomicfile `_. Originally I - considered using that, but at the time it was lacking a lot of features I - needed (Windows support, overwrite-parameter, overriding behavior through - subclassing). - -- The `Boltons library collection `_ - features a class for atomic file writes, which seems to have a very similar - ``overwrite`` parameter. It is lacking Windows support though. - -License -======= - -Licensed under the MIT, see ``LICENSE``. 
- - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/atomicwrites-1.3.0.dist-info/RECORD b/.tox/py37-normal/lib/python3.7/site-packages/atomicwrites-1.3.0.dist-info/RECORD deleted file mode 100644 index 6fb2cd6..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/atomicwrites-1.3.0.dist-info/RECORD +++ /dev/null @@ -1,7 +0,0 @@ -atomicwrites-1.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -atomicwrites-1.3.0.dist-info/METADATA,sha256=T9BgDww1IuYBG4de40vAgvpSbNBrRtNjTBNcukt1HZU,5535 -atomicwrites-1.3.0.dist-info/RECORD,, -atomicwrites-1.3.0.dist-info/WHEEL,sha256=gduuPyBvFJQSQ0zdyxF7k0zynDXbIbvg5ZBHoXum5uk,110 -atomicwrites-1.3.0.dist-info/top_level.txt,sha256=ks64zKVUkrl2ZrrP046CsytXlSGf8gLG-IcoXpNyeoc,13 -atomicwrites/__init__.py,sha256=00DapdQb04-k79KZjzzfwnI1Q8nfsiF5TPeVcqbGVw0,6562 -atomicwrites/__pycache__/__init__.cpython-37.pyc,, diff --git a/.tox/py37-normal/lib/python3.7/site-packages/atomicwrites-1.3.0.dist-info/WHEEL b/.tox/py37-normal/lib/python3.7/site-packages/atomicwrites-1.3.0.dist-info/WHEEL deleted file mode 100644 index 1316c41..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/atomicwrites-1.3.0.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.31.1) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/atomicwrites-1.3.0.dist-info/top_level.txt b/.tox/py37-normal/lib/python3.7/site-packages/atomicwrites-1.3.0.dist-info/top_level.txt deleted file mode 100644 index 5fa5a87..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/atomicwrites-1.3.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -atomicwrites diff --git a/.tox/py37-normal/lib/python3.7/site-packages/atomicwrites/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/atomicwrites/__init__.py deleted file mode 100644 index cfa5c29..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/atomicwrites/__init__.py +++ /dev/null @@ -1,216 +0,0 @@ -import contextlib -import io -import os -import sys -import tempfile - -try: - import fcntl -except ImportError: - fcntl = None - -__version__ = '1.3.0' - - -PY2 = sys.version_info[0] == 2 - -text_type = unicode if PY2 else str # noqa - - -def _path_to_unicode(x): - if not isinstance(x, text_type): - return x.decode(sys.getfilesystemencoding()) - return x - - -DEFAULT_MODE = "wb" if PY2 else "w" - - -_proper_fsync = os.fsync - - -if sys.platform != 'win32': - if hasattr(fcntl, 'F_FULLFSYNC'): - def _proper_fsync(fd): - # https://lists.apple.com/archives/darwin-dev/2005/Feb/msg00072.html - # https://developer.apple.com/library/mac/documentation/Darwin/Reference/ManPages/man2/fsync.2.html - # https://github.com/untitaker/python-atomicwrites/issues/6 - fcntl.fcntl(fd, fcntl.F_FULLFSYNC) - - def _sync_directory(directory): - # Ensure that filenames are written to disk - fd = os.open(directory, 0) - try: - _proper_fsync(fd) - finally: - os.close(fd) - - def _replace_atomic(src, dst): - os.rename(src, dst) - _sync_directory(os.path.normpath(os.path.dirname(dst))) - - def _move_atomic(src, dst): - os.link(src, dst) - os.unlink(src) - - src_dir = os.path.normpath(os.path.dirname(src)) - dst_dir = os.path.normpath(os.path.dirname(dst)) - _sync_directory(dst_dir) - if src_dir != dst_dir: - _sync_directory(src_dir) -else: - from ctypes import windll, WinError - - _MOVEFILE_REPLACE_EXISTING = 0x1 - _MOVEFILE_WRITE_THROUGH = 0x8 - _windows_default_flags = _MOVEFILE_WRITE_THROUGH - - def 
_handle_errors(rv): - if not rv: - raise WinError() - - def _replace_atomic(src, dst): - _handle_errors(windll.kernel32.MoveFileExW( - _path_to_unicode(src), _path_to_unicode(dst), - _windows_default_flags | _MOVEFILE_REPLACE_EXISTING - )) - - def _move_atomic(src, dst): - _handle_errors(windll.kernel32.MoveFileExW( - _path_to_unicode(src), _path_to_unicode(dst), - _windows_default_flags - )) - - -def replace_atomic(src, dst): - ''' - Move ``src`` to ``dst``. If ``dst`` exists, it will be silently - overwritten. - - Both paths must reside on the same filesystem for the operation to be - atomic. - ''' - return _replace_atomic(src, dst) - - -def move_atomic(src, dst): - ''' - Move ``src`` to ``dst``. There might a timewindow where both filesystem - entries exist. If ``dst`` already exists, :py:exc:`FileExistsError` will be - raised. - - Both paths must reside on the same filesystem for the operation to be - atomic. - ''' - return _move_atomic(src, dst) - - -class AtomicWriter(object): - ''' - A helper class for performing atomic writes. Usage:: - - with AtomicWriter(path).open() as f: - f.write(...) - - :param path: The destination filepath. May or may not exist. - :param mode: The filemode for the temporary file. This defaults to `wb` in - Python 2 and `w` in Python 3. - :param overwrite: If set to false, an error is raised if ``path`` exists. - Errors are only raised after the file has been written to. Either way, - the operation is atomic. - - If you need further control over the exact behavior, you are encouraged to - subclass. - ''' - - def __init__(self, path, mode=DEFAULT_MODE, overwrite=False, - **open_kwargs): - if 'a' in mode: - raise ValueError( - 'Appending to an existing file is not supported, because that ' - 'would involve an expensive `copy`-operation to a temporary ' - 'file. Open the file in normal `w`-mode and copy explicitly ' - 'if that\'s what you\'re after.' - ) - if 'x' in mode: - raise ValueError('Use the `overwrite`-parameter instead.') - if 'w' not in mode: - raise ValueError('AtomicWriters can only be written to.') - - self._path = path - self._mode = mode - self._overwrite = overwrite - self._open_kwargs = open_kwargs - - def open(self): - ''' - Open the temporary file. - ''' - return self._open(self.get_fileobject) - - @contextlib.contextmanager - def _open(self, get_fileobject): - f = None # make sure f exists even if get_fileobject() fails - try: - success = False - with get_fileobject(**self._open_kwargs) as f: - yield f - self.sync(f) - self.commit(f) - success = True - finally: - if not success: - try: - self.rollback(f) - except Exception: - pass - - def get_fileobject(self, suffix="", prefix=tempfile.template, dir=None, - **kwargs): - '''Return the temporary file to use.''' - if dir is None: - dir = os.path.normpath(os.path.dirname(self._path)) - descriptor, name = tempfile.mkstemp(suffix=suffix, prefix=prefix, - dir=dir) - # io.open() will take either the descriptor or the name, but we need - # the name later for commit()/replace_atomic() and couldn't find a way - # to get the filename from the descriptor. 
- os.close(descriptor) - kwargs['mode'] = self._mode - kwargs['file'] = name - return io.open(**kwargs) - - def sync(self, f): - '''responsible for clearing as many file caches as possible before - commit''' - f.flush() - _proper_fsync(f.fileno()) - - def commit(self, f): - '''Move the temporary file to the target location.''' - if self._overwrite: - replace_atomic(f.name, self._path) - else: - move_atomic(f.name, self._path) - - def rollback(self, f): - '''Clean up all temporary resources.''' - os.unlink(f.name) - - -def atomic_write(path, writer_cls=AtomicWriter, **cls_kwargs): - ''' - Simple atomic writes. This wraps :py:class:`AtomicWriter`:: - - with atomic_write(path) as f: - f.write(...) - - :param path: The target path to write to. - :param writer_cls: The writer class to use. This parameter is useful if you - subclassed :py:class:`AtomicWriter` to change some behavior and want to - use that new subclass. - - Additional keyword arguments are passed to the writer class. See - :py:class:`AtomicWriter`. - ''' - return writer_cls(path, **cls_kwargs).open() diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attr/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/attr/__init__.py deleted file mode 100644 index 9ff4d47..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attr/__init__.py +++ /dev/null @@ -1,68 +0,0 @@ -from __future__ import absolute_import, division, print_function - -from functools import partial - -from . import converters, exceptions, filters, validators -from ._config import get_run_validators, set_run_validators -from ._funcs import asdict, assoc, astuple, evolve, has -from ._make import ( - NOTHING, - Attribute, - Factory, - attrib, - attrs, - fields, - fields_dict, - make_class, - validate, -) -from ._version_info import VersionInfo - - -__version__ = "19.3.0" -__version_info__ = VersionInfo._from_version_string(__version__) - -__title__ = "attrs" -__description__ = "Classes Without Boilerplate" -__url__ = "https://www.attrs.org/" -__uri__ = __url__ -__doc__ = __description__ + " <" + __uri__ + ">" - -__author__ = "Hynek Schlawack" -__email__ = "hs@ox.cx" - -__license__ = "MIT" -__copyright__ = "Copyright (c) 2015 Hynek Schlawack" - - -s = attributes = attrs -ib = attr = attrib -dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) - - -__all__ = [ - "Attribute", - "Factory", - "NOTHING", - "asdict", - "assoc", - "astuple", - "attr", - "attrib", - "attributes", - "attrs", - "converters", - "evolve", - "exceptions", - "fields", - "fields_dict", - "filters", - "get_run_validators", - "has", - "ib", - "make_class", - "s", - "set_run_validators", - "validate", - "validators", -] diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attr/__init__.pyi b/.tox/py37-normal/lib/python3.7/site-packages/attr/__init__.pyi deleted file mode 100644 index 38f16f0..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attr/__init__.pyi +++ /dev/null @@ -1,278 +0,0 @@ -from typing import ( - Any, - Callable, - Dict, - Generic, - List, - Optional, - Sequence, - Mapping, - Tuple, - Type, - TypeVar, - Union, - overload, -) - -# `import X as X` is required to make these public -from . import exceptions as exceptions -from . import filters as filters -from . import converters as converters -from . 
import validators as validators - -from ._version_info import VersionInfo - -__version__: str -__version_info__: VersionInfo -__title__: str -__description__: str -__url__: str -__uri__: str -__author__: str -__email__: str -__license__: str -__copyright__: str - -_T = TypeVar("_T") -_C = TypeVar("_C", bound=type) - -_ValidatorType = Callable[[Any, Attribute[_T], _T], Any] -_ConverterType = Callable[[Any], _T] -_FilterType = Callable[[Attribute[_T], _T], bool] -_ReprType = Callable[[Any], str] -_ReprArgType = Union[bool, _ReprType] -# FIXME: in reality, if multiple validators are passed they must be in a list or tuple, -# but those are invariant and so would prevent subtypes of _ValidatorType from working -# when passed in a list or tuple. -_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] - -# _make -- - -NOTHING: object - -# NOTE: Factory lies about its return type to make this possible: `x: List[int] = Factory(list)` -# Work around mypy issue #4554 in the common case by using an overload. -@overload -def Factory(factory: Callable[[], _T]) -> _T: ... -@overload -def Factory( - factory: Union[Callable[[Any], _T], Callable[[], _T]], - takes_self: bool = ..., -) -> _T: ... - -class Attribute(Generic[_T]): - name: str - default: Optional[_T] - validator: Optional[_ValidatorType[_T]] - repr: _ReprArgType - cmp: bool - eq: bool - order: bool - hash: Optional[bool] - init: bool - converter: Optional[_ConverterType[_T]] - metadata: Dict[Any, Any] - type: Optional[Type[_T]] - kw_only: bool - -# NOTE: We had several choices for the annotation to use for type arg: -# 1) Type[_T] -# - Pros: Handles simple cases correctly -# - Cons: Might produce less informative errors in the case of conflicting TypeVars -# e.g. `attr.ib(default='bad', type=int)` -# 2) Callable[..., _T] -# - Pros: Better error messages than #1 for conflicting TypeVars -# - Cons: Terrible error messages for validator checks. -# e.g. attr.ib(type=int, validator=validate_str) -# -> error: Cannot infer function type argument -# 3) type (and do all of the work in the mypy plugin) -# - Pros: Simple here, and we could customize the plugin with our own errors. -# - Cons: Would need to write mypy plugin code to handle all the cases. -# We chose option #1. - -# `attr` lies about its return type to make the following possible: -# attr() -> Any -# attr(8) -> int -# attr(validator=) -> Whatever the callable expects. -# This makes this type of assignments possible: -# x: int = attr(8) -# -# This form catches explicit None or no default but with no other arguments returns Any. -@overload -def attrib( - default: None = ..., - validator: None = ..., - repr: _ReprArgType = ..., - cmp: Optional[bool] = ..., - hash: Optional[bool] = ..., - init: bool = ..., - metadata: Optional[Mapping[Any, Any]] = ..., - type: None = ..., - converter: None = ..., - factory: None = ..., - kw_only: bool = ..., - eq: Optional[bool] = ..., - order: Optional[bool] = ..., -) -> Any: ... - -# This form catches an explicit None or no default and infers the type from the other arguments. 
-@overload -def attrib( - default: None = ..., - validator: Optional[_ValidatorArgType[_T]] = ..., - repr: _ReprArgType = ..., - cmp: Optional[bool] = ..., - hash: Optional[bool] = ..., - init: bool = ..., - metadata: Optional[Mapping[Any, Any]] = ..., - type: Optional[Type[_T]] = ..., - converter: Optional[_ConverterType[_T]] = ..., - factory: Optional[Callable[[], _T]] = ..., - kw_only: bool = ..., - eq: Optional[bool] = ..., - order: Optional[bool] = ..., -) -> _T: ... - -# This form catches an explicit default argument. -@overload -def attrib( - default: _T, - validator: Optional[_ValidatorArgType[_T]] = ..., - repr: _ReprArgType = ..., - cmp: Optional[bool] = ..., - hash: Optional[bool] = ..., - init: bool = ..., - metadata: Optional[Mapping[Any, Any]] = ..., - type: Optional[Type[_T]] = ..., - converter: Optional[_ConverterType[_T]] = ..., - factory: Optional[Callable[[], _T]] = ..., - kw_only: bool = ..., - eq: Optional[bool] = ..., - order: Optional[bool] = ..., -) -> _T: ... - -# This form covers type=non-Type: e.g. forward references (str), Any -@overload -def attrib( - default: Optional[_T] = ..., - validator: Optional[_ValidatorArgType[_T]] = ..., - repr: _ReprArgType = ..., - cmp: Optional[bool] = ..., - hash: Optional[bool] = ..., - init: bool = ..., - metadata: Optional[Mapping[Any, Any]] = ..., - type: object = ..., - converter: Optional[_ConverterType[_T]] = ..., - factory: Optional[Callable[[], _T]] = ..., - kw_only: bool = ..., - eq: Optional[bool] = ..., - order: Optional[bool] = ..., -) -> Any: ... -@overload -def attrs( - maybe_cls: _C, - these: Optional[Dict[str, Any]] = ..., - repr_ns: Optional[str] = ..., - repr: bool = ..., - cmp: Optional[bool] = ..., - hash: Optional[bool] = ..., - init: bool = ..., - slots: bool = ..., - frozen: bool = ..., - weakref_slot: bool = ..., - str: bool = ..., - auto_attribs: bool = ..., - kw_only: bool = ..., - cache_hash: bool = ..., - auto_exc: bool = ..., - eq: Optional[bool] = ..., - order: Optional[bool] = ..., -) -> _C: ... -@overload -def attrs( - maybe_cls: None = ..., - these: Optional[Dict[str, Any]] = ..., - repr_ns: Optional[str] = ..., - repr: bool = ..., - cmp: Optional[bool] = ..., - hash: Optional[bool] = ..., - init: bool = ..., - slots: bool = ..., - frozen: bool = ..., - weakref_slot: bool = ..., - str: bool = ..., - auto_attribs: bool = ..., - kw_only: bool = ..., - cache_hash: bool = ..., - auto_exc: bool = ..., - eq: Optional[bool] = ..., - order: Optional[bool] = ..., -) -> Callable[[_C], _C]: ... - -# TODO: add support for returning NamedTuple from the mypy plugin -class _Fields(Tuple[Attribute[Any], ...]): - def __getattr__(self, name: str) -> Attribute[Any]: ... - -def fields(cls: type) -> _Fields: ... -def fields_dict(cls: type) -> Dict[str, Attribute[Any]]: ... -def validate(inst: Any) -> None: ... - -# TODO: add support for returning a proper attrs class from the mypy plugin -# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', [attr.ib()])` is valid -def make_class( - name: str, - attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]], - bases: Tuple[type, ...] = ..., - repr_ns: Optional[str] = ..., - repr: bool = ..., - cmp: Optional[bool] = ..., - hash: Optional[bool] = ..., - init: bool = ..., - slots: bool = ..., - frozen: bool = ..., - weakref_slot: bool = ..., - str: bool = ..., - auto_attribs: bool = ..., - kw_only: bool = ..., - cache_hash: bool = ..., - auto_exc: bool = ..., - eq: Optional[bool] = ..., - order: Optional[bool] = ..., -) -> type: ... 
- -# _funcs -- - -# TODO: add support for returning TypedDict from the mypy plugin -# FIXME: asdict/astuple do not honor their factory args. waiting on one of these: -# https://github.com/python/mypy/issues/4236 -# https://github.com/python/typing/issues/253 -def asdict( - inst: Any, - recurse: bool = ..., - filter: Optional[_FilterType[Any]] = ..., - dict_factory: Type[Mapping[Any, Any]] = ..., - retain_collection_types: bool = ..., -) -> Dict[str, Any]: ... - -# TODO: add support for returning NamedTuple from the mypy plugin -def astuple( - inst: Any, - recurse: bool = ..., - filter: Optional[_FilterType[Any]] = ..., - tuple_factory: Type[Sequence[Any]] = ..., - retain_collection_types: bool = ..., -) -> Tuple[Any, ...]: ... -def has(cls: type) -> bool: ... -def assoc(inst: _T, **changes: Any) -> _T: ... -def evolve(inst: _T, **changes: Any) -> _T: ... - -# _config -- - -def set_run_validators(run: bool) -> None: ... -def get_run_validators() -> bool: ... - -# aliases -- - -s = attributes = attrs -ib = attr = attrib -dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attr/_compat.py b/.tox/py37-normal/lib/python3.7/site-packages/attr/_compat.py deleted file mode 100644 index a915db8..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attr/_compat.py +++ /dev/null @@ -1,230 +0,0 @@ -from __future__ import absolute_import, division, print_function - -import platform -import sys -import types -import warnings - - -PY2 = sys.version_info[0] == 2 -PYPY = platform.python_implementation() == "PyPy" - - -if PYPY or sys.version_info[:2] >= (3, 6): - ordered_dict = dict -else: - from collections import OrderedDict - - ordered_dict = OrderedDict - - -if PY2: - from UserDict import IterableUserDict - from collections import Mapping, Sequence - - # We 'bundle' isclass instead of using inspect as importing inspect is - # fairly expensive (order of 10-15 ms for a modern machine in 2016) - def isclass(klass): - return isinstance(klass, (type, types.ClassType)) - - # TYPE is used in exceptions, repr(int) is different on Python 2 and 3. - TYPE = "type" - - def iteritems(d): - return d.iteritems() - - # Python 2 is bereft of a read-only dict proxy, so we make one! - class ReadOnlyDict(IterableUserDict): - """ - Best-effort read-only dict wrapper. - """ - - def __setitem__(self, key, val): - # We gently pretend we're a Python 3 mappingproxy. - raise TypeError( - "'mappingproxy' object does not support item assignment" - ) - - def update(self, _): - # We gently pretend we're a Python 3 mappingproxy. - raise AttributeError( - "'mappingproxy' object has no attribute 'update'" - ) - - def __delitem__(self, _): - # We gently pretend we're a Python 3 mappingproxy. - raise TypeError( - "'mappingproxy' object does not support item deletion" - ) - - def clear(self): - # We gently pretend we're a Python 3 mappingproxy. - raise AttributeError( - "'mappingproxy' object has no attribute 'clear'" - ) - - def pop(self, key, default=None): - # We gently pretend we're a Python 3 mappingproxy. - raise AttributeError( - "'mappingproxy' object has no attribute 'pop'" - ) - - def popitem(self): - # We gently pretend we're a Python 3 mappingproxy. - raise AttributeError( - "'mappingproxy' object has no attribute 'popitem'" - ) - - def setdefault(self, key, default=None): - # We gently pretend we're a Python 3 mappingproxy. 
- raise AttributeError( - "'mappingproxy' object has no attribute 'setdefault'" - ) - - def __repr__(self): - # Override to be identical to the Python 3 version. - return "mappingproxy(" + repr(self.data) + ")" - - def metadata_proxy(d): - res = ReadOnlyDict() - res.data.update(d) # We blocked update, so we have to do it like this. - return res - - def just_warn(*args, **kw): # pragma: nocover - """ - We only warn on Python 3 because we are not aware of any concrete - consequences of not setting the cell on Python 2. - """ - - -else: # Python 3 and later. - from collections.abc import Mapping, Sequence # noqa - - def just_warn(*args, **kw): - """ - We only warn on Python 3 because we are not aware of any concrete - consequences of not setting the cell on Python 2. - """ - warnings.warn( - "Running interpreter doesn't sufficiently support code object " - "introspection. Some features like bare super() or accessing " - "__class__ will not work with slotted classes.", - RuntimeWarning, - stacklevel=2, - ) - - def isclass(klass): - return isinstance(klass, type) - - TYPE = "class" - - def iteritems(d): - return d.items() - - def metadata_proxy(d): - return types.MappingProxyType(dict(d)) - - -def make_set_closure_cell(): - """Return a function of two arguments (cell, value) which sets - the value stored in the closure cell `cell` to `value`. - """ - # pypy makes this easy. (It also supports the logic below, but - # why not do the easy/fast thing?) - if PYPY: # pragma: no cover - - def set_closure_cell(cell, value): - cell.__setstate__((value,)) - - return set_closure_cell - - # Otherwise gotta do it the hard way. - - # Create a function that will set its first cellvar to `value`. - def set_first_cellvar_to(value): - x = value - return - - # This function will be eliminated as dead code, but - # not before its reference to `x` forces `x` to be - # represented as a closure cell rather than a local. - def force_x_to_be_a_cell(): # pragma: no cover - return x - - try: - # Extract the code object and make sure our assumptions about - # the closure behavior are correct. - if PY2: - co = set_first_cellvar_to.func_code - else: - co = set_first_cellvar_to.__code__ - if co.co_cellvars != ("x",) or co.co_freevars != (): - raise AssertionError # pragma: no cover - - # Convert this code object to a code object that sets the - # function's first _freevar_ (not cellvar) to the argument. - if sys.version_info >= (3, 8): - # CPython 3.8+ has an incompatible CodeType signature - # (added a posonlyargcount argument) but also added - # CodeType.replace() to do this without counting parameters. - set_first_freevar_code = co.replace( - co_cellvars=co.co_freevars, co_freevars=co.co_cellvars - ) - else: - args = [co.co_argcount] - if not PY2: - args.append(co.co_kwonlyargcount) - args.extend( - [ - co.co_nlocals, - co.co_stacksize, - co.co_flags, - co.co_code, - co.co_consts, - co.co_names, - co.co_varnames, - co.co_filename, - co.co_name, - co.co_firstlineno, - co.co_lnotab, - # These two arguments are reversed: - co.co_cellvars, - co.co_freevars, - ] - ) - set_first_freevar_code = types.CodeType(*args) - - def set_closure_cell(cell, value): - # Create a function using the set_first_freevar_code, - # whose first closure cell is `cell`. Calling it will - # change the value of that cell. - setter = types.FunctionType( - set_first_freevar_code, {}, "setter", (), (cell,) - ) - # And call it to set the cell. 
- setter(value) - - # Make sure it works on this interpreter: - def make_func_with_cell(): - x = None - - def func(): - return x # pragma: no cover - - return func - - if PY2: - cell = make_func_with_cell().func_closure[0] - else: - cell = make_func_with_cell().__closure__[0] - set_closure_cell(cell, 100) - if cell.cell_contents != 100: - raise AssertionError # pragma: no cover - - except Exception: - return just_warn - else: - return set_closure_cell - - -set_closure_cell = make_set_closure_cell() diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attr/_config.py b/.tox/py37-normal/lib/python3.7/site-packages/attr/_config.py deleted file mode 100644 index 8ec9209..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attr/_config.py +++ /dev/null @@ -1,23 +0,0 @@ -from __future__ import absolute_import, division, print_function - - -__all__ = ["set_run_validators", "get_run_validators"] - -_run_validators = True - - -def set_run_validators(run): - """ - Set whether or not validators are run. By default, they are run. - """ - if not isinstance(run, bool): - raise TypeError("'run' must be bool.") - global _run_validators - _run_validators = run - - -def get_run_validators(): - """ - Return whether or not validators are run. - """ - return _run_validators diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attr/_funcs.py b/.tox/py37-normal/lib/python3.7/site-packages/attr/_funcs.py deleted file mode 100644 index c077e42..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attr/_funcs.py +++ /dev/null @@ -1,290 +0,0 @@ -from __future__ import absolute_import, division, print_function - -import copy - -from ._compat import iteritems -from ._make import NOTHING, _obj_setattr, fields -from .exceptions import AttrsAttributeNotFoundError - - -def asdict( - inst, - recurse=True, - filter=None, - dict_factory=dict, - retain_collection_types=False, -): - """ - Return the ``attrs`` attribute values of *inst* as a dict. - - Optionally recurse into other ``attrs``-decorated classes. - - :param inst: Instance of an ``attrs``-decorated class. - :param bool recurse: Recurse into classes that are also - ``attrs``-decorated. - :param callable filter: A callable whose return code determines whether an - attribute or element is included (``True``) or dropped (``False``). Is - called with the `attr.Attribute` as the first argument and the - value as the second argument. - :param callable dict_factory: A callable to produce dictionaries from. For - example, to produce ordered dictionaries instead of normal Python - dictionaries, pass in ``collections.OrderedDict``. - :param bool retain_collection_types: Do not convert to ``list`` when - encountering an attribute whose type is ``tuple`` or ``set``. Only - meaningful if ``recurse`` is ``True``. - - :rtype: return type of *dict_factory* - - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class. - - .. versionadded:: 16.0.0 *dict_factory* - .. 
versionadded:: 16.1.0 *retain_collection_types* - """ - attrs = fields(inst.__class__) - rv = dict_factory() - for a in attrs: - v = getattr(inst, a.name) - if filter is not None and not filter(a, v): - continue - if recurse is True: - if has(v.__class__): - rv[a.name] = asdict( - v, True, filter, dict_factory, retain_collection_types - ) - elif isinstance(v, (tuple, list, set)): - cf = v.__class__ if retain_collection_types is True else list - rv[a.name] = cf( - [ - _asdict_anything( - i, filter, dict_factory, retain_collection_types - ) - for i in v - ] - ) - elif isinstance(v, dict): - df = dict_factory - rv[a.name] = df( - ( - _asdict_anything( - kk, filter, df, retain_collection_types - ), - _asdict_anything( - vv, filter, df, retain_collection_types - ), - ) - for kk, vv in iteritems(v) - ) - else: - rv[a.name] = v - else: - rv[a.name] = v - return rv - - -def _asdict_anything(val, filter, dict_factory, retain_collection_types): - """ - ``asdict`` only works on attrs instances, this works on anything. - """ - if getattr(val.__class__, "__attrs_attrs__", None) is not None: - # Attrs class. - rv = asdict(val, True, filter, dict_factory, retain_collection_types) - elif isinstance(val, (tuple, list, set)): - cf = val.__class__ if retain_collection_types is True else list - rv = cf( - [ - _asdict_anything( - i, filter, dict_factory, retain_collection_types - ) - for i in val - ] - ) - elif isinstance(val, dict): - df = dict_factory - rv = df( - ( - _asdict_anything(kk, filter, df, retain_collection_types), - _asdict_anything(vv, filter, df, retain_collection_types), - ) - for kk, vv in iteritems(val) - ) - else: - rv = val - return rv - - -def astuple( - inst, - recurse=True, - filter=None, - tuple_factory=tuple, - retain_collection_types=False, -): - """ - Return the ``attrs`` attribute values of *inst* as a tuple. - - Optionally recurse into other ``attrs``-decorated classes. - - :param inst: Instance of an ``attrs``-decorated class. - :param bool recurse: Recurse into classes that are also - ``attrs``-decorated. - :param callable filter: A callable whose return code determines whether an - attribute or element is included (``True``) or dropped (``False``). Is - called with the `attr.Attribute` as the first argument and the - value as the second argument. - :param callable tuple_factory: A callable to produce tuples from. For - example, to produce lists instead of tuples. - :param bool retain_collection_types: Do not convert to ``list`` - or ``dict`` when encountering an attribute which type is - ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is - ``True``. - - :rtype: return type of *tuple_factory* - - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class. - - .. versionadded:: 16.2.0 - """ - attrs = fields(inst.__class__) - rv = [] - retain = retain_collection_types # Very long. 
:/ - for a in attrs: - v = getattr(inst, a.name) - if filter is not None and not filter(a, v): - continue - if recurse is True: - if has(v.__class__): - rv.append( - astuple( - v, - recurse=True, - filter=filter, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - ) - elif isinstance(v, (tuple, list, set)): - cf = v.__class__ if retain is True else list - rv.append( - cf( - [ - astuple( - j, - recurse=True, - filter=filter, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - if has(j.__class__) - else j - for j in v - ] - ) - ) - elif isinstance(v, dict): - df = v.__class__ if retain is True else dict - rv.append( - df( - ( - astuple( - kk, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - if has(kk.__class__) - else kk, - astuple( - vv, - tuple_factory=tuple_factory, - retain_collection_types=retain, - ) - if has(vv.__class__) - else vv, - ) - for kk, vv in iteritems(v) - ) - ) - else: - rv.append(v) - else: - rv.append(v) - return rv if tuple_factory is list else tuple_factory(rv) - - -def has(cls): - """ - Check whether *cls* is a class with ``attrs`` attributes. - - :param type cls: Class to introspect. - :raise TypeError: If *cls* is not a class. - - :rtype: bool - """ - return getattr(cls, "__attrs_attrs__", None) is not None - - -def assoc(inst, **changes): - """ - Copy *inst* and apply *changes*. - - :param inst: Instance of a class with ``attrs`` attributes. - :param changes: Keyword changes in the new copy. - - :return: A copy of inst with *changes* incorporated. - - :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't - be found on *cls*. - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class. - - .. deprecated:: 17.1.0 - Use `evolve` instead. - """ - import warnings - - warnings.warn( - "assoc is deprecated and will be removed after 2018/01.", - DeprecationWarning, - stacklevel=2, - ) - new = copy.copy(inst) - attrs = fields(inst.__class__) - for k, v in iteritems(changes): - a = getattr(attrs, k, NOTHING) - if a is NOTHING: - raise AttrsAttributeNotFoundError( - "{k} is not an attrs attribute on {cl}.".format( - k=k, cl=new.__class__ - ) - ) - _obj_setattr(new, k, v) - return new - - -def evolve(inst, **changes): - """ - Create a new instance, based on *inst* with *changes* applied. - - :param inst: Instance of a class with ``attrs`` attributes. - :param changes: Keyword changes in the new copy. - - :return: A copy of inst with *changes* incorporated. - - :raise TypeError: If *attr_name* couldn't be found in the class - ``__init__``. - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class. - - .. versionadded:: 17.1.0 - """ - cls = inst.__class__ - attrs = fields(cls) - for a in attrs: - if not a.init: - continue - attr_name = a.name # To deal with private attributes. - init_name = attr_name if attr_name[0] != "_" else attr_name[1:] - if init_name not in changes: - changes[init_name] = getattr(inst, attr_name) - return cls(**changes) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attr/_make.py b/.tox/py37-normal/lib/python3.7/site-packages/attr/_make.py deleted file mode 100644 index 46f9c54..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attr/_make.py +++ /dev/null @@ -1,2168 +0,0 @@ -from __future__ import absolute_import, division, print_function - -import copy -import linecache -import sys -import threading -import uuid -import warnings - -from operator import itemgetter - -from . 
import _config -from ._compat import ( - PY2, - isclass, - iteritems, - metadata_proxy, - ordered_dict, - set_closure_cell, -) -from .exceptions import ( - DefaultAlreadySetError, - FrozenInstanceError, - NotAnAttrsClassError, - PythonTooOldError, - UnannotatedAttributeError, -) - - -# This is used at least twice, so cache it here. -_obj_setattr = object.__setattr__ -_init_converter_pat = "__attr_converter_{}" -_init_factory_pat = "__attr_factory_{}" -_tuple_property_pat = ( - " {attr_name} = _attrs_property(_attrs_itemgetter({index}))" -) -_classvar_prefixes = ("typing.ClassVar", "t.ClassVar", "ClassVar") -# we don't use a double-underscore prefix because that triggers -# name mangling when trying to create a slot for the field -# (when slots=True) -_hash_cache_field = "_attrs_cached_hash" - -_empty_metadata_singleton = metadata_proxy({}) - -# Unique object for unequivocal getattr() defaults. -_sentinel = object() - - -class _Nothing(object): - """ - Sentinel class to indicate the lack of a value when ``None`` is ambiguous. - - ``_Nothing`` is a singleton. There is only ever one of it. - """ - - _singleton = None - - def __new__(cls): - if _Nothing._singleton is None: - _Nothing._singleton = super(_Nothing, cls).__new__(cls) - return _Nothing._singleton - - def __repr__(self): - return "NOTHING" - - -NOTHING = _Nothing() -""" -Sentinel to indicate the lack of a value when ``None`` is ambiguous. -""" - - -def attrib( - default=NOTHING, - validator=None, - repr=True, - cmp=None, - hash=None, - init=True, - metadata=None, - type=None, - converter=None, - factory=None, - kw_only=False, - eq=None, - order=None, -): - """ - Create a new attribute on a class. - - .. warning:: - - Does *not* do anything unless the class is also decorated with - `attr.s`! - - :param default: A value that is used if an ``attrs``-generated ``__init__`` - is used and no value is passed while instantiating or the attribute is - excluded using ``init=False``. - - If the value is an instance of `Factory`, its callable will be - used to construct a new value (useful for mutable data types like lists - or dicts). - - If a default is not set (or set manually to ``attr.NOTHING``), a value - *must* be supplied when instantiating; otherwise a `TypeError` - will be raised. - - The default can also be set using decorator notation as shown below. - - :type default: Any value - - :param callable factory: Syntactic sugar for - ``default=attr.Factory(callable)``. - - :param validator: `callable` that is called by ``attrs``-generated - ``__init__`` methods after the instance has been initialized. They - receive the initialized instance, the `Attribute`, and the - passed value. - - The return value is *not* inspected so the validator has to throw an - exception itself. - - If a ``list`` is passed, its items are treated as validators and must - all pass. - - Validators can be globally disabled and re-enabled using - `get_run_validators`. - - The validator can also be set using decorator notation as shown below. - - :type validator: ``callable`` or a ``list`` of ``callable``\\ s. - - :param repr: Include this attribute in the generated ``__repr__`` - method. If ``True``, include the attribute; if ``False``, omit it. By - default, the built-in ``repr()`` function is used. To override how the - attribute value is formatted, pass a ``callable`` that takes a single - value and returns a string. Note that the resulting string is used - as-is, i.e. it will be used directly *instead* of calling ``repr()`` - (the default). 
- :type repr: a ``bool`` or a ``callable`` to use a custom function. - :param bool eq: If ``True`` (default), include this attribute in the - generated ``__eq__`` and ``__ne__`` methods that check two instances - for equality. - :param bool order: If ``True`` (default), include this attributes in the - generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. - :param bool cmp: Setting to ``True`` is equivalent to setting ``eq=True, - order=True``. Deprecated in favor of *eq* and *order*. - :param hash: Include this attribute in the generated ``__hash__`` - method. If ``None`` (default), mirror *eq*'s value. This is the - correct behavior according the Python spec. Setting this value to - anything else than ``None`` is *discouraged*. - :type hash: ``bool`` or ``None`` - :param bool init: Include this attribute in the generated ``__init__`` - method. It is possible to set this to ``False`` and set a default - value. In that case this attributed is unconditionally initialized - with the specified default value or factory. - :param callable converter: `callable` that is called by - ``attrs``-generated ``__init__`` methods to converter attribute's value - to the desired format. It is given the passed-in value, and the - returned value will be used as the new value of the attribute. The - value is converted before being passed to the validator, if any. - :param metadata: An arbitrary mapping, to be used by third-party - components. See `extending_metadata`. - :param type: The type of the attribute. In Python 3.6 or greater, the - preferred method to specify the type is using a variable annotation - (see `PEP 526 `_). - This argument is provided for backward compatibility. - Regardless of the approach used, the type will be stored on - ``Attribute.type``. - - Please note that ``attrs`` doesn't do anything with this metadata by - itself. You can use it as part of your own code or for - `static type checking `. - :param kw_only: Make this attribute keyword-only (Python 3+) - in the generated ``__init__`` (if ``init`` is ``False``, this - parameter is ignored). - - .. versionadded:: 15.2.0 *convert* - .. versionadded:: 16.3.0 *metadata* - .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. - .. versionchanged:: 17.1.0 - *hash* is ``None`` and therefore mirrors *eq* by default. - .. versionadded:: 17.3.0 *type* - .. deprecated:: 17.4.0 *convert* - .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated - *convert* to achieve consistency with other noun-based arguments. - .. versionadded:: 18.1.0 - ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. - .. versionadded:: 18.2.0 *kw_only* - .. versionchanged:: 19.2.0 *convert* keyword argument removed - .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. - .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. - .. versionadded:: 19.2.0 *eq* and *order* - """ - eq, order = _determine_eq_order(cmp, eq, order) - - if hash is not None and hash is not True and hash is not False: - raise TypeError( - "Invalid value for hash. Must be True, False, or None." - ) - - if factory is not None: - if default is not NOTHING: - raise ValueError( - "The `default` and `factory` arguments are mutually " - "exclusive." 
- ) - if not callable(factory): - raise ValueError("The `factory` argument must be a callable.") - default = Factory(factory) - - if metadata is None: - metadata = {} - - return _CountingAttr( - default=default, - validator=validator, - repr=repr, - cmp=None, - hash=hash, - init=init, - converter=converter, - metadata=metadata, - type=type, - kw_only=kw_only, - eq=eq, - order=order, - ) - - -def _make_attr_tuple_class(cls_name, attr_names): - """ - Create a tuple subclass to hold `Attribute`s for an `attrs` class. - - The subclass is a bare tuple with properties for names. - - class MyClassAttributes(tuple): - __slots__ = () - x = property(itemgetter(0)) - """ - attr_class_name = "{}Attributes".format(cls_name) - attr_class_template = [ - "class {}(tuple):".format(attr_class_name), - " __slots__ = ()", - ] - if attr_names: - for i, attr_name in enumerate(attr_names): - attr_class_template.append( - _tuple_property_pat.format(index=i, attr_name=attr_name) - ) - else: - attr_class_template.append(" pass") - globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} - eval(compile("\n".join(attr_class_template), "", "exec"), globs) - - return globs[attr_class_name] - - -# Tuple class for extracted attributes from a class definition. -# `base_attrs` is a subset of `attrs`. -_Attributes = _make_attr_tuple_class( - "_Attributes", - [ - # all attributes to build dunder methods for - "attrs", - # attributes that have been inherited - "base_attrs", - # map inherited attributes to their originating classes - "base_attrs_map", - ], -) - - -def _is_class_var(annot): - """ - Check whether *annot* is a typing.ClassVar. - - The string comparison hack is used to avoid evaluating all string - annotations which would put attrs-based classes at a performance - disadvantage compared to plain old classes. - """ - return str(annot).startswith(_classvar_prefixes) - - -def _get_annotations(cls): - """ - Get annotations for *cls*. - """ - anns = getattr(cls, "__annotations__", None) - if anns is None: - return {} - - # Verify that the annotations aren't merely inherited. - for base_cls in cls.__mro__[1:]: - if anns is getattr(base_cls, "__annotations__", None): - return {} - - return anns - - -def _counter_getter(e): - """ - Key function for sorting to avoid re-creating a lambda for every class. - """ - return e[1].counter - - -def _transform_attrs(cls, these, auto_attribs, kw_only): - """ - Transform all `_CountingAttr`s on a class into `Attribute`s. - - If *these* is passed, use that and don't look for them on the class. - - Return an `_Attributes`. - """ - cd = cls.__dict__ - anns = _get_annotations(cls) - - if these is not None: - ca_list = [(name, ca) for name, ca in iteritems(these)] - - if not isinstance(these, ordered_dict): - ca_list.sort(key=_counter_getter) - elif auto_attribs is True: - ca_names = { - name - for name, attr in cd.items() - if isinstance(attr, _CountingAttr) - } - ca_list = [] - annot_names = set() - for attr_name, type in anns.items(): - if _is_class_var(type): - continue - annot_names.add(attr_name) - a = cd.get(attr_name, NOTHING) - if not isinstance(a, _CountingAttr): - if a is NOTHING: - a = attrib() - else: - a = attrib(default=a) - ca_list.append((attr_name, a)) - - unannotated = ca_names - annot_names - if len(unannotated) > 0: - raise UnannotatedAttributeError( - "The following `attr.ib`s lack a type annotation: " - + ", ".join( - sorted(unannotated, key=lambda n: cd.get(n).counter) - ) - + "." 
- ) - else: - ca_list = sorted( - ( - (name, attr) - for name, attr in cd.items() - if isinstance(attr, _CountingAttr) - ), - key=lambda e: e[1].counter, - ) - - own_attrs = [ - Attribute.from_counting_attr( - name=attr_name, ca=ca, type=anns.get(attr_name) - ) - for attr_name, ca in ca_list - ] - - base_attrs = [] - base_attr_map = {} # A dictionary of base attrs to their classes. - taken_attr_names = {a.name: a for a in own_attrs} - - # Traverse the MRO and collect attributes. - for base_cls in cls.__mro__[1:-1]: - sub_attrs = getattr(base_cls, "__attrs_attrs__", None) - if sub_attrs is not None: - for a in sub_attrs: - prev_a = taken_attr_names.get(a.name) - # Only add an attribute if it hasn't been defined before. This - # allows for overwriting attribute definitions by subclassing. - if prev_a is None: - base_attrs.append(a) - taken_attr_names[a.name] = a - base_attr_map[a.name] = base_cls - - attr_names = [a.name for a in base_attrs + own_attrs] - - AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) - - if kw_only: - own_attrs = [a._assoc(kw_only=True) for a in own_attrs] - base_attrs = [a._assoc(kw_only=True) for a in base_attrs] - - attrs = AttrsClass(base_attrs + own_attrs) - - # Mandatory vs non-mandatory attr order only matters when they are part of - # the __init__ signature and when they aren't kw_only (which are moved to - # the end and can be mandatory or non-mandatory in any order, as they will - # be specified as keyword args anyway). Check the order of those attrs: - had_default = False - for a in (a for a in attrs if a.init is not False and a.kw_only is False): - if had_default is True and a.default is NOTHING: - raise ValueError( - "No mandatory attributes allowed after an attribute with a " - "default value or factory. Attribute in question: %r" % (a,) - ) - - if had_default is False and a.default is not NOTHING: - had_default = True - - return _Attributes((attrs, base_attrs, base_attr_map)) - - -def _frozen_setattrs(self, name, value): - """ - Attached to frozen classes as __setattr__. - """ - raise FrozenInstanceError() - - -def _frozen_delattrs(self, name): - """ - Attached to frozen classes as __delattr__. - """ - raise FrozenInstanceError() - - -class _ClassBuilder(object): - """ - Iteratively build *one* class. 
- """ - - __slots__ = ( - "_cls", - "_cls_dict", - "_attrs", - "_base_names", - "_attr_names", - "_slots", - "_frozen", - "_weakref_slot", - "_cache_hash", - "_has_post_init", - "_delete_attribs", - "_base_attr_map", - "_is_exc", - ) - - def __init__( - self, - cls, - these, - slots, - frozen, - weakref_slot, - auto_attribs, - kw_only, - cache_hash, - is_exc, - ): - attrs, base_attrs, base_map = _transform_attrs( - cls, these, auto_attribs, kw_only - ) - - self._cls = cls - self._cls_dict = dict(cls.__dict__) if slots else {} - self._attrs = attrs - self._base_names = set(a.name for a in base_attrs) - self._base_attr_map = base_map - self._attr_names = tuple(a.name for a in attrs) - self._slots = slots - self._frozen = frozen or _has_frozen_base_class(cls) - self._weakref_slot = weakref_slot - self._cache_hash = cache_hash - self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) - self._delete_attribs = not bool(these) - self._is_exc = is_exc - - self._cls_dict["__attrs_attrs__"] = self._attrs - - if frozen: - self._cls_dict["__setattr__"] = _frozen_setattrs - self._cls_dict["__delattr__"] = _frozen_delattrs - - def __repr__(self): - return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__) - - def build_class(self): - """ - Finalize class based on the accumulated configuration. - - Builder cannot be used after calling this method. - """ - if self._slots is True: - return self._create_slots_class() - else: - return self._patch_original_class() - - def _patch_original_class(self): - """ - Apply accumulated methods and return the class. - """ - cls = self._cls - base_names = self._base_names - - # Clean class of attribute definitions (`attr.ib()`s). - if self._delete_attribs: - for name in self._attr_names: - if ( - name not in base_names - and getattr(cls, name, _sentinel) is not _sentinel - ): - try: - delattr(cls, name) - except AttributeError: - # This can happen if a base class defines a class - # variable and we want to set an attribute with the - # same name by using only a type annotation. - pass - - # Attach our dunder methods. - for name, value in self._cls_dict.items(): - setattr(cls, name, value) - - # Attach __setstate__. This is necessary to clear the hash code - # cache on deserialization. See issue - # https://github.com/python-attrs/attrs/issues/482 . - # Note that this code only handles setstate for dict classes. - # For slotted classes, see similar code in _create_slots_class . - if self._cache_hash: - existing_set_state_method = getattr(cls, "__setstate__", None) - if existing_set_state_method: - raise NotImplementedError( - "Currently you cannot use hash caching if " - "you specify your own __setstate__ method." - "See https://github.com/python-attrs/attrs/issues/494 ." - ) - - def cache_hash_set_state(chss_self, _): - # clear hash code cache - setattr(chss_self, _hash_cache_field, None) - - setattr(cls, "__setstate__", cache_hash_set_state) - - return cls - - def _create_slots_class(self): - """ - Build and return a new class with a `__slots__` attribute. - """ - base_names = self._base_names - cd = { - k: v - for k, v in iteritems(self._cls_dict) - if k not in tuple(self._attr_names) + ("__dict__", "__weakref__") - } - - weakref_inherited = False - - # Traverse the MRO to check for an existing __weakref__. 
- for base_cls in self._cls.__mro__[1:-1]: - if "__weakref__" in getattr(base_cls, "__dict__", ()): - weakref_inherited = True - break - - names = self._attr_names - if ( - self._weakref_slot - and "__weakref__" not in getattr(self._cls, "__slots__", ()) - and "__weakref__" not in names - and not weakref_inherited - ): - names += ("__weakref__",) - - # We only add the names of attributes that aren't inherited. - # Settings __slots__ to inherited attributes wastes memory. - slot_names = [name for name in names if name not in base_names] - if self._cache_hash: - slot_names.append(_hash_cache_field) - cd["__slots__"] = tuple(slot_names) - - qualname = getattr(self._cls, "__qualname__", None) - if qualname is not None: - cd["__qualname__"] = qualname - - # __weakref__ is not writable. - state_attr_names = tuple( - an for an in self._attr_names if an != "__weakref__" - ) - - def slots_getstate(self): - """ - Automatically created by attrs. - """ - return tuple(getattr(self, name) for name in state_attr_names) - - hash_caching_enabled = self._cache_hash - - def slots_setstate(self, state): - """ - Automatically created by attrs. - """ - __bound_setattr = _obj_setattr.__get__(self, Attribute) - for name, value in zip(state_attr_names, state): - __bound_setattr(name, value) - # Clearing the hash code cache on deserialization is needed - # because hash codes can change from run to run. See issue - # https://github.com/python-attrs/attrs/issues/482 . - # Note that this code only handles setstate for slotted classes. - # For dict classes, see similar code in _patch_original_class . - if hash_caching_enabled: - __bound_setattr(_hash_cache_field, None) - - # slots and frozen require __getstate__/__setstate__ to work - cd["__getstate__"] = slots_getstate - cd["__setstate__"] = slots_setstate - - # Create new class based on old class and our methods. - cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) - - # The following is a fix for - # https://github.com/python-attrs/attrs/issues/102. On Python 3, - # if a method mentions `__class__` or uses the no-arg super(), the - # compiler will bake a reference to the class in the method itself - # as `method.__closure__`. Since we replace the class with a - # clone, we rewrite these references so it keeps working. - for item in cls.__dict__.values(): - if isinstance(item, (classmethod, staticmethod)): - # Class- and staticmethods hide their functions inside. - # These might need to be rewritten as well. - closure_cells = getattr(item.__func__, "__closure__", None) - else: - closure_cells = getattr(item, "__closure__", None) - - if not closure_cells: # Catch None or the empty list. - continue - for cell in closure_cells: - if cell.cell_contents is self._cls: - set_closure_cell(cell, cls) - - return cls - - def add_repr(self, ns): - self._cls_dict["__repr__"] = self._add_method_dunders( - _make_repr(self._attrs, ns=ns) - ) - return self - - def add_str(self): - repr = self._cls_dict.get("__repr__") - if repr is None: - raise ValueError( - "__str__ can only be generated if a __repr__ exists." 
- ) - - def __str__(self): - return self.__repr__() - - self._cls_dict["__str__"] = self._add_method_dunders(__str__) - return self - - def make_unhashable(self): - self._cls_dict["__hash__"] = None - return self - - def add_hash(self): - self._cls_dict["__hash__"] = self._add_method_dunders( - _make_hash( - self._cls, - self._attrs, - frozen=self._frozen, - cache_hash=self._cache_hash, - ) - ) - - return self - - def add_init(self): - self._cls_dict["__init__"] = self._add_method_dunders( - _make_init( - self._cls, - self._attrs, - self._has_post_init, - self._frozen, - self._slots, - self._cache_hash, - self._base_attr_map, - self._is_exc, - ) - ) - - return self - - def add_eq(self): - cd = self._cls_dict - - cd["__eq__"], cd["__ne__"] = ( - self._add_method_dunders(meth) - for meth in _make_eq(self._cls, self._attrs) - ) - - return self - - def add_order(self): - cd = self._cls_dict - - cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( - self._add_method_dunders(meth) - for meth in _make_order(self._cls, self._attrs) - ) - - return self - - def _add_method_dunders(self, method): - """ - Add __module__ and __qualname__ to a *method* if possible. - """ - try: - method.__module__ = self._cls.__module__ - except AttributeError: - pass - - try: - method.__qualname__ = ".".join( - (self._cls.__qualname__, method.__name__) - ) - except AttributeError: - pass - - return method - - -_CMP_DEPRECATION = ( - "The usage of `cmp` is deprecated and will be removed on or after " - "2021-06-01. Please use `eq` and `order` instead." -) - - -def _determine_eq_order(cmp, eq, order): - """ - Validate the combination of *cmp*, *eq*, and *order*. Derive the effective - values of eq and order. - """ - if cmp is not None and any((eq is not None, order is not None)): - raise ValueError("Don't mix `cmp` with `eq' and `order`.") - - # cmp takes precedence due to bw-compatibility. - if cmp is not None: - warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=3) - - return cmp, cmp - - # If left None, equality is on and ordering mirrors equality. - if eq is None: - eq = True - - if order is None: - order = eq - - if eq is False and order is True: - raise ValueError("`order` can only be True if `eq` is True too.") - - return eq, order - - -def attrs( - maybe_cls=None, - these=None, - repr_ns=None, - repr=True, - cmp=None, - hash=None, - init=True, - slots=False, - frozen=False, - weakref_slot=True, - str=False, - auto_attribs=False, - kw_only=False, - cache_hash=False, - auto_exc=False, - eq=None, - order=None, -): - r""" - A class decorator that adds `dunder - `_\ -methods according to the - specified attributes using `attr.ib` or the *these* argument. - - :param these: A dictionary of name to `attr.ib` mappings. This is - useful to avoid the definition of your attributes within the class body - because you can't (e.g. if you want to add ``__repr__`` methods to - Django models) or don't want to. - - If *these* is not ``None``, ``attrs`` will *not* search the class body - for attributes and will *not* remove any attributes from it. - - If *these* is an ordered dict (`dict` on Python 3.6+, - `collections.OrderedDict` otherwise), the order is deduced from - the order of the attributes inside *these*. Otherwise the order - of the definition of the attributes is used. - - :type these: `dict` of `str` to `attr.ib` - - :param str repr_ns: When using nested classes, there's no way in Python 2 - to automatically detect that. 
Therefore it's possible to set the - namespace explicitly for a more meaningful ``repr`` output. - :param bool repr: Create a ``__repr__`` method with a human readable - representation of ``attrs`` attributes.. - :param bool str: Create a ``__str__`` method that is identical to - ``__repr__``. This is usually not necessary except for - `Exception`\ s. - :param bool eq: If ``True`` or ``None`` (default), add ``__eq__`` and - ``__ne__`` methods that check two instances for equality. - - They compare the instances as if they were tuples of their ``attrs`` - attributes, but only iff the types of both classes are *identical*! - :type eq: `bool` or `None` - :param bool order: If ``True``, add ``__lt__``, ``__le__``, ``__gt__``, - and ``__ge__`` methods that behave like *eq* above and allow instances - to be ordered. If ``None`` (default) mirror value of *eq*. - :type order: `bool` or `None` - :param cmp: Setting to ``True`` is equivalent to setting ``eq=True, - order=True``. Deprecated in favor of *eq* and *order*, has precedence - over them for backward-compatibility though. Must not be mixed with - *eq* or *order*. - :type cmp: `bool` or `None` - :param hash: If ``None`` (default), the ``__hash__`` method is generated - according how *eq* and *frozen* are set. - - 1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you. - 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to - None, marking it unhashable (which it is). - 3. If *eq* is False, ``__hash__`` will be left untouched meaning the - ``__hash__`` method of the base class will be used (if base class is - ``object``, this means it will fall back to id-based hashing.). - - Although not recommended, you can decide for yourself and force - ``attrs`` to create one (e.g. if the class is immutable even though you - didn't freeze it programmatically) by passing ``True`` or not. Both of - these cases are rather special and should be used carefully. - - See our documentation on `hashing`, Python's documentation on - `object.__hash__`, and the `GitHub issue that led to the default \ - behavior `_ for more - details. - :type hash: ``bool`` or ``None`` - :param bool init: Create a ``__init__`` method that initializes the - ``attrs`` attributes. Leading underscores are stripped for the - argument name. If a ``__attrs_post_init__`` method exists on the - class, it will be called after the class is fully initialized. - :param bool slots: Create a `slotted class ` that's more - memory-efficient. - :param bool frozen: Make instances immutable after initialization. If - someone attempts to modify a frozen instance, - `attr.exceptions.FrozenInstanceError` is raised. - - Please note: - - 1. This is achieved by installing a custom ``__setattr__`` method - on your class, so you can't implement your own. - - 2. True immutability is impossible in Python. - - 3. This *does* have a minor a runtime performance `impact - ` when initializing new instances. In other words: - ``__init__`` is slightly slower with ``frozen=True``. - - 4. If a class is frozen, you cannot modify ``self`` in - ``__attrs_post_init__`` or a self-written ``__init__``. You can - circumvent that limitation by using - ``object.__setattr__(self, "attribute_name", value)``. - - :param bool weakref_slot: Make instances weak-referenceable. This has no - effect unless ``slots`` is also enabled. - :param bool auto_attribs: If True, collect `PEP 526`_-annotated attributes - (Python 3.6 and later only) from the class body. - - In this case, you **must** annotate every field. 
If ``attrs`` - encounters a field that is set to an `attr.ib` but lacks a type - annotation, an `attr.exceptions.UnannotatedAttributeError` is - raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't - want to set a type. - - If you assign a value to those attributes (e.g. ``x: int = 42``), that - value becomes the default value like if it were passed using - ``attr.ib(default=42)``. Passing an instance of `Factory` also - works as expected. - - Attributes annotated as `typing.ClassVar`, and attributes that are - neither annotated nor set to an `attr.ib` are **ignored**. - - .. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/ - :param bool kw_only: Make all attributes keyword-only (Python 3+) - in the generated ``__init__`` (if ``init`` is ``False``, this - parameter is ignored). - :param bool cache_hash: Ensure that the object's hash code is computed - only once and stored on the object. If this is set to ``True``, - hashing must be either explicitly or implicitly enabled for this - class. If the hash code is cached, avoid any reassignments of - fields involved in hash code computation or mutations of the objects - those fields point to after object creation. If such changes occur, - the behavior of the object's hash code is undefined. - :param bool auto_exc: If the class subclasses `BaseException` - (which implicitly includes any subclass of any exception), the - following happens to behave like a well-behaved Python exceptions - class: - - - the values for *eq*, *order*, and *hash* are ignored and the - instances compare and hash by the instance's ids (N.B. ``attrs`` will - *not* remove existing implementations of ``__hash__`` or the equality - methods. It just won't add own ones.), - - all attributes that are either passed into ``__init__`` or have a - default value are additionally available as a tuple in the ``args`` - attribute, - - the value of *str* is ignored leaving ``__str__`` to base classes. - - .. versionadded:: 16.0.0 *slots* - .. versionadded:: 16.1.0 *frozen* - .. versionadded:: 16.3.0 *str* - .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. - .. versionchanged:: 17.1.0 - *hash* supports ``None`` as value which is also the default now. - .. versionadded:: 17.3.0 *auto_attribs* - .. versionchanged:: 18.1.0 - If *these* is passed, no attributes are deleted from the class body. - .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. - .. versionadded:: 18.2.0 *weakref_slot* - .. deprecated:: 18.2.0 - ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a - `DeprecationWarning` if the classes compared are subclasses of - each other. ``__eq`` and ``__ne__`` never tried to compared subclasses - to each other. - .. versionchanged:: 19.2.0 - ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider - subclasses comparable anymore. - .. versionadded:: 18.2.0 *kw_only* - .. versionadded:: 18.2.0 *cache_hash* - .. versionadded:: 19.1.0 *auto_exc* - .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. - .. 
versionadded:: 19.2.0 *eq* and *order* - """ - eq, order = _determine_eq_order(cmp, eq, order) - - def wrap(cls): - - if getattr(cls, "__class__", None) is None: - raise TypeError("attrs only works with new-style classes.") - - is_exc = auto_exc is True and issubclass(cls, BaseException) - - builder = _ClassBuilder( - cls, - these, - slots, - frozen, - weakref_slot, - auto_attribs, - kw_only, - cache_hash, - is_exc, - ) - - if repr is True: - builder.add_repr(repr_ns) - if str is True: - builder.add_str() - if eq is True and not is_exc: - builder.add_eq() - if order is True and not is_exc: - builder.add_order() - - if hash is not True and hash is not False and hash is not None: - # Can't use `hash in` because 1 == True for example. - raise TypeError( - "Invalid value for hash. Must be True, False, or None." - ) - elif hash is False or (hash is None and eq is False) or is_exc: - # Don't do anything. Should fall back to __object__'s __hash__ - # which is by id. - if cache_hash: - raise TypeError( - "Invalid value for cache_hash. To use hash caching," - " hashing must be either explicitly or implicitly " - "enabled." - ) - elif hash is True or (hash is None and eq is True and frozen is True): - # Build a __hash__ if told so, or if it's safe. - builder.add_hash() - else: - # Raise TypeError on attempts to hash. - if cache_hash: - raise TypeError( - "Invalid value for cache_hash. To use hash caching," - " hashing must be either explicitly or implicitly " - "enabled." - ) - builder.make_unhashable() - - if init is True: - builder.add_init() - else: - if cache_hash: - raise TypeError( - "Invalid value for cache_hash. To use hash caching," - " init must be True." - ) - - return builder.build_class() - - # maybe_cls's type depends on the usage of the decorator. It's a class - # if it's used as `@attrs` but ``None`` if used as `@attrs()`. - if maybe_cls is None: - return wrap - else: - return wrap(maybe_cls) - - -_attrs = attrs -""" -Internal alias so we can use it in functions that take an argument called -*attrs*. -""" - - -if PY2: - - def _has_frozen_base_class(cls): - """ - Check whether *cls* has a frozen ancestor by looking at its - __setattr__. - """ - return ( - getattr(cls.__setattr__, "__module__", None) - == _frozen_setattrs.__module__ - and cls.__setattr__.__name__ == _frozen_setattrs.__name__ - ) - - -else: - - def _has_frozen_base_class(cls): - """ - Check whether *cls* has a frozen ancestor by looking at its - __setattr__. - """ - return cls.__setattr__ == _frozen_setattrs - - -def _attrs_to_tuple(obj, attrs): - """ - Create a tuple of all values of *obj*'s *attrs*. - """ - return tuple(getattr(obj, a.name) for a in attrs) - - -def _generate_unique_filename(cls, func_name): - """ - Create a "filename" suitable for a function being generated. - """ - unique_id = uuid.uuid4() - extra = "" - count = 1 - - while True: - unique_filename = "".format( - func_name, - cls.__module__, - getattr(cls, "__qualname__", cls.__name__), - extra, - ) - # To handle concurrency we essentially "reserve" our spot in - # the linecache with a dummy line. The caller can then - # set this value correctly. - cache_line = (1, None, (str(unique_id),), unique_filename) - if ( - linecache.cache.setdefault(unique_filename, cache_line) - == cache_line - ): - return unique_filename - - # Looks like this spot is taken. Try again. 
- count += 1 - extra = "-{0}".format(count) - - -def _make_hash(cls, attrs, frozen, cache_hash): - attrs = tuple( - a for a in attrs if a.hash is True or (a.hash is None and a.eq is True) - ) - - tab = " " - - unique_filename = _generate_unique_filename(cls, "hash") - type_hash = hash(unique_filename) - - method_lines = ["def __hash__(self):"] - - def append_hash_computation_lines(prefix, indent): - """ - Generate the code for actually computing the hash code. - Below this will either be returned directly or used to compute - a value which is then cached, depending on the value of cache_hash - """ - method_lines.extend( - [indent + prefix + "hash((", indent + " %d," % (type_hash,)] - ) - - for a in attrs: - method_lines.append(indent + " self.%s," % a.name) - - method_lines.append(indent + " ))") - - if cache_hash: - method_lines.append(tab + "if self.%s is None:" % _hash_cache_field) - if frozen: - append_hash_computation_lines( - "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2 - ) - method_lines.append(tab * 2 + ")") # close __setattr__ - else: - append_hash_computation_lines( - "self.%s = " % _hash_cache_field, tab * 2 - ) - method_lines.append(tab + "return self.%s" % _hash_cache_field) - else: - append_hash_computation_lines("return ", tab) - - script = "\n".join(method_lines) - globs = {} - locs = {} - bytecode = compile(script, unique_filename, "exec") - eval(bytecode, globs, locs) - - # In order of debuggers like PDB being able to step through the code, - # we add a fake linecache entry. - linecache.cache[unique_filename] = ( - len(script), - None, - script.splitlines(True), - unique_filename, - ) - - return locs["__hash__"] - - -def _add_hash(cls, attrs): - """ - Add a hash method to *cls*. - """ - cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False) - return cls - - -def __ne__(self, other): - """ - Check equality and either forward a NotImplemented or return the result - negated. - """ - result = self.__eq__(other) - if result is NotImplemented: - return NotImplemented - - return not result - - -def _make_eq(cls, attrs): - attrs = [a for a in attrs if a.eq] - - unique_filename = _generate_unique_filename(cls, "eq") - lines = [ - "def __eq__(self, other):", - " if other.__class__ is not self.__class__:", - " return NotImplemented", - ] - # We can't just do a big self.x = other.x and... clause due to - # irregularities like nan == nan is false but (nan,) == (nan,) is true. - if attrs: - lines.append(" return (") - others = [" ) == ("] - for a in attrs: - lines.append(" self.%s," % (a.name,)) - others.append(" other.%s," % (a.name,)) - - lines += others + [" )"] - else: - lines.append(" return True") - - script = "\n".join(lines) - globs = {} - locs = {} - bytecode = compile(script, unique_filename, "exec") - eval(bytecode, globs, locs) - - # In order of debuggers like PDB being able to step through the code, - # we add a fake linecache entry. - linecache.cache[unique_filename] = ( - len(script), - None, - script.splitlines(True), - unique_filename, - ) - return locs["__eq__"], __ne__ - - -def _make_order(cls, attrs): - attrs = [a for a in attrs if a.order] - - def attrs_to_tuple(obj): - """ - Save us some typing. - """ - return _attrs_to_tuple(obj, attrs) - - def __lt__(self, other): - """ - Automatically created by attrs. - """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) < attrs_to_tuple(other) - - return NotImplemented - - def __le__(self, other): - """ - Automatically created by attrs. 
- """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) <= attrs_to_tuple(other) - - return NotImplemented - - def __gt__(self, other): - """ - Automatically created by attrs. - """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) > attrs_to_tuple(other) - - return NotImplemented - - def __ge__(self, other): - """ - Automatically created by attrs. - """ - if other.__class__ is self.__class__: - return attrs_to_tuple(self) >= attrs_to_tuple(other) - - return NotImplemented - - return __lt__, __le__, __gt__, __ge__ - - -def _add_eq(cls, attrs=None): - """ - Add equality methods to *cls* with *attrs*. - """ - if attrs is None: - attrs = cls.__attrs_attrs__ - - cls.__eq__, cls.__ne__ = _make_eq(cls, attrs) - - return cls - - -_already_repring = threading.local() - - -def _make_repr(attrs, ns): - """ - Make a repr method that includes relevant *attrs*, adding *ns* to the full - name. - """ - - # Figure out which attributes to include, and which function to use to - # format them. The a.repr value can be either bool or a custom callable. - attr_names_with_reprs = tuple( - (a.name, repr if a.repr is True else a.repr) - for a in attrs - if a.repr is not False - ) - - def __repr__(self): - """ - Automatically created by attrs. - """ - try: - working_set = _already_repring.working_set - except AttributeError: - working_set = set() - _already_repring.working_set = working_set - - if id(self) in working_set: - return "..." - real_cls = self.__class__ - if ns is None: - qualname = getattr(real_cls, "__qualname__", None) - if qualname is not None: - class_name = qualname.rsplit(">.", 1)[-1] - else: - class_name = real_cls.__name__ - else: - class_name = ns + "." + real_cls.__name__ - - # Since 'self' remains on the stack (i.e.: strongly referenced) for the - # duration of this call, it's safe to depend on id(...) stability, and - # not need to track the instance and therefore worry about properties - # like weakref- or hash-ability. - working_set.add(id(self)) - try: - result = [class_name, "("] - first = True - for name, attr_repr in attr_names_with_reprs: - if first: - first = False - else: - result.append(", ") - result.extend( - (name, "=", attr_repr(getattr(self, name, NOTHING))) - ) - return "".join(result) + ")" - finally: - working_set.remove(id(self)) - - return __repr__ - - -def _add_repr(cls, ns=None, attrs=None): - """ - Add a repr method to *cls*. - """ - if attrs is None: - attrs = cls.__attrs_attrs__ - - cls.__repr__ = _make_repr(attrs, ns) - return cls - - -def _make_init( - cls, attrs, post_init, frozen, slots, cache_hash, base_attr_map, is_exc -): - attrs = [a for a in attrs if a.init or a.default is not NOTHING] - - unique_filename = _generate_unique_filename(cls, "init") - - script, globs, annotations = _attrs_to_init_script( - attrs, frozen, slots, post_init, cache_hash, base_attr_map, is_exc - ) - locs = {} - bytecode = compile(script, unique_filename, "exec") - attr_dict = dict((a.name, a) for a in attrs) - globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) - - if frozen is True: - # Save the lookup overhead in __init__ if we need to circumvent - # immutability. - globs["_cached_setattr"] = _obj_setattr - - eval(bytecode, globs, locs) - - # In order of debuggers like PDB being able to step through the code, - # we add a fake linecache entry. 
- linecache.cache[unique_filename] = ( - len(script), - None, - script.splitlines(True), - unique_filename, - ) - - __init__ = locs["__init__"] - __init__.__annotations__ = annotations - - return __init__ - - -def fields(cls): - """ - Return the tuple of ``attrs`` attributes for a class. - - The tuple also allows accessing the fields by their names (see below for - examples). - - :param type cls: Class to introspect. - - :raise TypeError: If *cls* is not a class. - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class. - - :rtype: tuple (with name accessors) of `attr.Attribute` - - .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields - by name. - """ - if not isclass(cls): - raise TypeError("Passed object must be a class.") - attrs = getattr(cls, "__attrs_attrs__", None) - if attrs is None: - raise NotAnAttrsClassError( - "{cls!r} is not an attrs-decorated class.".format(cls=cls) - ) - return attrs - - -def fields_dict(cls): - """ - Return an ordered dictionary of ``attrs`` attributes for a class, whose - keys are the attribute names. - - :param type cls: Class to introspect. - - :raise TypeError: If *cls* is not a class. - :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` - class. - - :rtype: an ordered dict where keys are attribute names and values are - `attr.Attribute`\\ s. This will be a `dict` if it's - naturally ordered like on Python 3.6+ or an - :class:`~collections.OrderedDict` otherwise. - - .. versionadded:: 18.1.0 - """ - if not isclass(cls): - raise TypeError("Passed object must be a class.") - attrs = getattr(cls, "__attrs_attrs__", None) - if attrs is None: - raise NotAnAttrsClassError( - "{cls!r} is not an attrs-decorated class.".format(cls=cls) - ) - return ordered_dict(((a.name, a) for a in attrs)) - - -def validate(inst): - """ - Validate all attributes on *inst* that have a validator. - - Leaves all exceptions through. - - :param inst: Instance of a class with ``attrs`` attributes. - """ - if _config._run_validators is False: - return - - for a in fields(inst.__class__): - v = a.validator - if v is not None: - v(inst, a, getattr(inst, a.name)) - - -def _is_slot_cls(cls): - return "__slots__" in cls.__dict__ - - -def _is_slot_attr(a_name, base_attr_map): - """ - Check if the attribute name comes from a slot class. - """ - return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name]) - - -def _attrs_to_init_script( - attrs, frozen, slots, post_init, cache_hash, base_attr_map, is_exc -): - """ - Return a script of an initializer for *attrs* and a dict of globals. - - The globals are expected by the generated script. - - If *frozen* is True, we cannot set the attributes directly so we use - a cached ``object.__setattr__``. - """ - lines = [] - any_slot_ancestors = any( - _is_slot_attr(a.name, base_attr_map) for a in attrs - ) - if frozen is True: - if slots is True: - lines.append( - # Circumvent the __setattr__ descriptor to save one lookup per - # assignment. 
- # Note _setattr will be used again below if cache_hash is True - "_setattr = _cached_setattr.__get__(self, self.__class__)" - ) - - def fmt_setter(attr_name, value_var): - return "_setattr('%(attr_name)s', %(value_var)s)" % { - "attr_name": attr_name, - "value_var": value_var, - } - - def fmt_setter_with_converter(attr_name, value_var): - conv_name = _init_converter_pat.format(attr_name) - return "_setattr('%(attr_name)s', %(conv)s(%(value_var)s))" % { - "attr_name": attr_name, - "value_var": value_var, - "conv": conv_name, - } - - else: - # Dict frozen classes assign directly to __dict__. - # But only if the attribute doesn't come from an ancestor slot - # class. - # Note _inst_dict will be used again below if cache_hash is True - lines.append("_inst_dict = self.__dict__") - if any_slot_ancestors: - lines.append( - # Circumvent the __setattr__ descriptor to save one lookup - # per assignment. - "_setattr = _cached_setattr.__get__(self, self.__class__)" - ) - - def fmt_setter(attr_name, value_var): - if _is_slot_attr(attr_name, base_attr_map): - res = "_setattr('%(attr_name)s', %(value_var)s)" % { - "attr_name": attr_name, - "value_var": value_var, - } - else: - res = "_inst_dict['%(attr_name)s'] = %(value_var)s" % { - "attr_name": attr_name, - "value_var": value_var, - } - return res - - def fmt_setter_with_converter(attr_name, value_var): - conv_name = _init_converter_pat.format(attr_name) - if _is_slot_attr(attr_name, base_attr_map): - tmpl = "_setattr('%(attr_name)s', %(c)s(%(value_var)s))" - else: - tmpl = "_inst_dict['%(attr_name)s'] = %(c)s(%(value_var)s)" - return tmpl % { - "attr_name": attr_name, - "value_var": value_var, - "c": conv_name, - } - - else: - # Not frozen. - def fmt_setter(attr_name, value): - return "self.%(attr_name)s = %(value)s" % { - "attr_name": attr_name, - "value": value, - } - - def fmt_setter_with_converter(attr_name, value_var): - conv_name = _init_converter_pat.format(attr_name) - return "self.%(attr_name)s = %(conv)s(%(value_var)s)" % { - "attr_name": attr_name, - "value_var": value_var, - "conv": conv_name, - } - - args = [] - kw_only_args = [] - attrs_to_validate = [] - - # This is a dictionary of names to validator and converter callables. - # Injecting this into __init__ globals lets us avoid lookups. 
- names_for_globals = {} - annotations = {"return": None} - - for a in attrs: - if a.validator: - attrs_to_validate.append(a) - attr_name = a.name - arg_name = a.name.lstrip("_") - has_factory = isinstance(a.default, Factory) - if has_factory and a.default.takes_self: - maybe_self = "self" - else: - maybe_self = "" - if a.init is False: - if has_factory: - init_factory_name = _init_factory_pat.format(a.name) - if a.converter is not None: - lines.append( - fmt_setter_with_converter( - attr_name, - init_factory_name + "({0})".format(maybe_self), - ) - ) - conv_name = _init_converter_pat.format(a.name) - names_for_globals[conv_name] = a.converter - else: - lines.append( - fmt_setter( - attr_name, - init_factory_name + "({0})".format(maybe_self), - ) - ) - names_for_globals[init_factory_name] = a.default.factory - else: - if a.converter is not None: - lines.append( - fmt_setter_with_converter( - attr_name, - "attr_dict['{attr_name}'].default".format( - attr_name=attr_name - ), - ) - ) - conv_name = _init_converter_pat.format(a.name) - names_for_globals[conv_name] = a.converter - else: - lines.append( - fmt_setter( - attr_name, - "attr_dict['{attr_name}'].default".format( - attr_name=attr_name - ), - ) - ) - elif a.default is not NOTHING and not has_factory: - arg = "{arg_name}=attr_dict['{attr_name}'].default".format( - arg_name=arg_name, attr_name=attr_name - ) - if a.kw_only: - kw_only_args.append(arg) - else: - args.append(arg) - if a.converter is not None: - lines.append(fmt_setter_with_converter(attr_name, arg_name)) - names_for_globals[ - _init_converter_pat.format(a.name) - ] = a.converter - else: - lines.append(fmt_setter(attr_name, arg_name)) - elif has_factory: - arg = "{arg_name}=NOTHING".format(arg_name=arg_name) - if a.kw_only: - kw_only_args.append(arg) - else: - args.append(arg) - lines.append( - "if {arg_name} is not NOTHING:".format(arg_name=arg_name) - ) - init_factory_name = _init_factory_pat.format(a.name) - if a.converter is not None: - lines.append( - " " + fmt_setter_with_converter(attr_name, arg_name) - ) - lines.append("else:") - lines.append( - " " - + fmt_setter_with_converter( - attr_name, - init_factory_name + "({0})".format(maybe_self), - ) - ) - names_for_globals[ - _init_converter_pat.format(a.name) - ] = a.converter - else: - lines.append(" " + fmt_setter(attr_name, arg_name)) - lines.append("else:") - lines.append( - " " - + fmt_setter( - attr_name, - init_factory_name + "({0})".format(maybe_self), - ) - ) - names_for_globals[init_factory_name] = a.default.factory - else: - if a.kw_only: - kw_only_args.append(arg_name) - else: - args.append(arg_name) - if a.converter is not None: - lines.append(fmt_setter_with_converter(attr_name, arg_name)) - names_for_globals[ - _init_converter_pat.format(a.name) - ] = a.converter - else: - lines.append(fmt_setter(attr_name, arg_name)) - - if a.init is True and a.converter is None and a.type is not None: - annotations[arg_name] = a.type - - if attrs_to_validate: # we can skip this if there are no validators. 
- names_for_globals["_config"] = _config - lines.append("if _config._run_validators is True:") - for a in attrs_to_validate: - val_name = "__attr_validator_{}".format(a.name) - attr_name = "__attr_{}".format(a.name) - lines.append( - " {}(self, {}, self.{})".format(val_name, attr_name, a.name) - ) - names_for_globals[val_name] = a.validator - names_for_globals[attr_name] = a - if post_init: - lines.append("self.__attrs_post_init__()") - - # because this is set only after __attrs_post_init is called, a crash - # will result if post-init tries to access the hash code. This seemed - # preferable to setting this beforehand, in which case alteration to - # field values during post-init combined with post-init accessing the - # hash code would result in silent bugs. - if cache_hash: - if frozen: - if slots: - # if frozen and slots, then _setattr defined above - init_hash_cache = "_setattr('%s', %s)" - else: - # if frozen and not slots, then _inst_dict defined above - init_hash_cache = "_inst_dict['%s'] = %s" - else: - init_hash_cache = "self.%s = %s" - lines.append(init_hash_cache % (_hash_cache_field, "None")) - - # For exceptions we rely on BaseException.__init__ for proper - # initialization. - if is_exc: - vals = ",".join("self." + a.name for a in attrs if a.init) - - lines.append("BaseException.__init__(self, %s)" % (vals,)) - - args = ", ".join(args) - if kw_only_args: - if PY2: - raise PythonTooOldError( - "Keyword-only arguments only work on Python 3 and later." - ) - - args += "{leading_comma}*, {kw_only_args}".format( - leading_comma=", " if args else "", - kw_only_args=", ".join(kw_only_args), - ) - return ( - """\ -def __init__(self, {args}): - {lines} -""".format( - args=args, lines="\n ".join(lines) if lines else "pass" - ), - names_for_globals, - annotations, - ) - - -class Attribute(object): - """ - *Read-only* representation of an attribute. - - :attribute name: The name of the attribute. - - Plus *all* arguments of `attr.ib` (except for ``factory`` - which is only syntactic sugar for ``default=Factory(...)``. - - For the version history of the fields, see `attr.ib`. - """ - - __slots__ = ( - "name", - "default", - "validator", - "repr", - "eq", - "order", - "hash", - "init", - "metadata", - "type", - "converter", - "kw_only", - ) - - def __init__( - self, - name, - default, - validator, - repr, - cmp, # XXX: unused, remove along with other cmp code. - hash, - init, - metadata=None, - type=None, - converter=None, - kw_only=False, - eq=None, - order=None, - ): - eq, order = _determine_eq_order(cmp, eq, order) - - # Cache this descriptor here to speed things up later. - bound_setattr = _obj_setattr.__get__(self, Attribute) - - # Despite the big red warning, people *do* instantiate `Attribute` - # themselves. - bound_setattr("name", name) - bound_setattr("default", default) - bound_setattr("validator", validator) - bound_setattr("repr", repr) - bound_setattr("eq", eq) - bound_setattr("order", order) - bound_setattr("hash", hash) - bound_setattr("init", init) - bound_setattr("converter", converter) - bound_setattr( - "metadata", - ( - metadata_proxy(metadata) - if metadata - else _empty_metadata_singleton - ), - ) - bound_setattr("type", type) - bound_setattr("kw_only", kw_only) - - def __setattr__(self, name, value): - raise FrozenInstanceError() - - @classmethod - def from_counting_attr(cls, name, ca, type=None): - # type holds the annotated value. 
deal with conflicts: - if type is None: - type = ca.type - elif ca.type is not None: - raise ValueError( - "Type annotation and type argument cannot both be present" - ) - inst_dict = { - k: getattr(ca, k) - for k in Attribute.__slots__ - if k - not in ( - "name", - "validator", - "default", - "type", - ) # exclude methods and deprecated alias - } - return cls( - name=name, - validator=ca._validator, - default=ca._default, - type=type, - cmp=None, - **inst_dict - ) - - @property - def cmp(self): - """ - Simulate the presence of a cmp attribute and warn. - """ - warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=2) - - return self.eq and self.order - - # Don't use attr.assoc since fields(Attribute) doesn't work - def _assoc(self, **changes): - """ - Copy *self* and apply *changes*. - """ - new = copy.copy(self) - - new._setattrs(changes.items()) - - return new - - # Don't use _add_pickle since fields(Attribute) doesn't work - def __getstate__(self): - """ - Play nice with pickle. - """ - return tuple( - getattr(self, name) if name != "metadata" else dict(self.metadata) - for name in self.__slots__ - ) - - def __setstate__(self, state): - """ - Play nice with pickle. - """ - self._setattrs(zip(self.__slots__, state)) - - def _setattrs(self, name_values_pairs): - bound_setattr = _obj_setattr.__get__(self, Attribute) - for name, value in name_values_pairs: - if name != "metadata": - bound_setattr(name, value) - else: - bound_setattr( - name, - metadata_proxy(value) - if value - else _empty_metadata_singleton, - ) - - -_a = [ - Attribute( - name=name, - default=NOTHING, - validator=None, - repr=True, - cmp=None, - eq=True, - order=False, - hash=(name != "metadata"), - init=True, - ) - for name in Attribute.__slots__ -] - -Attribute = _add_hash( - _add_eq(_add_repr(Attribute, attrs=_a), attrs=_a), - attrs=[a for a in _a if a.hash], -) - - -class _CountingAttr(object): - """ - Intermediate representation of attributes that uses a counter to preserve - the order in which the attributes have been defined. - - *Internal* data structure of the attrs library. Running into is most - likely the result of a bug like a forgotten `@attr.s` decorator. - """ - - __slots__ = ( - "counter", - "_default", - "repr", - "eq", - "order", - "hash", - "init", - "metadata", - "_validator", - "converter", - "type", - "kw_only", - ) - __attrs_attrs__ = tuple( - Attribute( - name=name, - default=NOTHING, - validator=None, - repr=True, - cmp=None, - hash=True, - init=True, - kw_only=False, - eq=True, - order=False, - ) - for name in ( - "counter", - "_default", - "repr", - "eq", - "order", - "hash", - "init", - ) - ) + ( - Attribute( - name="metadata", - default=None, - validator=None, - repr=True, - cmp=None, - hash=False, - init=True, - kw_only=False, - eq=True, - order=False, - ), - ) - cls_counter = 0 - - def __init__( - self, - default, - validator, - repr, - cmp, # XXX: unused, remove along with cmp - hash, - init, - converter, - metadata, - type, - kw_only, - eq, - order, - ): - _CountingAttr.cls_counter += 1 - self.counter = _CountingAttr.cls_counter - self._default = default - # If validator is a list/tuple, wrap it using helper validator. 
- if validator and isinstance(validator, (list, tuple)): - self._validator = and_(*validator) - else: - self._validator = validator - self.repr = repr - self.eq = eq - self.order = order - self.hash = hash - self.init = init - self.converter = converter - self.metadata = metadata - self.type = type - self.kw_only = kw_only - - def validator(self, meth): - """ - Decorator that adds *meth* to the list of validators. - - Returns *meth* unchanged. - - .. versionadded:: 17.1.0 - """ - if self._validator is None: - self._validator = meth - else: - self._validator = and_(self._validator, meth) - return meth - - def default(self, meth): - """ - Decorator that allows to set the default for an attribute. - - Returns *meth* unchanged. - - :raises DefaultAlreadySetError: If default has been set before. - - .. versionadded:: 17.1.0 - """ - if self._default is not NOTHING: - raise DefaultAlreadySetError() - - self._default = Factory(meth, takes_self=True) - - return meth - - -_CountingAttr = _add_eq(_add_repr(_CountingAttr)) - - -@attrs(slots=True, init=False, hash=True) -class Factory(object): - """ - Stores a factory callable. - - If passed as the default value to `attr.ib`, the factory is used to - generate a new value. - - :param callable factory: A callable that takes either none or exactly one - mandatory positional argument depending on *takes_self*. - :param bool takes_self: Pass the partially initialized instance that is - being initialized as a positional argument. - - .. versionadded:: 17.1.0 *takes_self* - """ - - factory = attrib() - takes_self = attrib() - - def __init__(self, factory, takes_self=False): - """ - `Factory` is part of the default machinery so if we want a default - value here, we have to implement it ourselves. - """ - self.factory = factory - self.takes_self = takes_self - - -def make_class(name, attrs, bases=(object,), **attributes_arguments): - """ - A quick way to create a new class called *name* with *attrs*. - - :param name: The name for the new class. - :type name: str - - :param attrs: A list of names or a dictionary of mappings of names to - attributes. - - If *attrs* is a list or an ordered dict (`dict` on Python 3.6+, - `collections.OrderedDict` otherwise), the order is deduced from - the order of the names or attributes inside *attrs*. Otherwise the - order of the definition of the attributes is used. - :type attrs: `list` or `dict` - - :param tuple bases: Classes that the new class will subclass. - - :param attributes_arguments: Passed unmodified to `attr.s`. - - :return: A new class with *attrs*. - :rtype: type - - .. versionadded:: 17.1.0 *bases* - .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. - """ - if isinstance(attrs, dict): - cls_dict = attrs - elif isinstance(attrs, (list, tuple)): - cls_dict = dict((a, attrib()) for a in attrs) - else: - raise TypeError("attrs argument must be a dict or a list.") - - post_init = cls_dict.pop("__attrs_post_init__", None) - type_ = type( - name, - bases, - {} if post_init is None else {"__attrs_post_init__": post_init}, - ) - # For pickling to work, the __module__ variable needs to be set to the - # frame where the class is created. Bypass this step in environments where - # sys._getframe is not defined (Jython for example) or sys._getframe is not - # defined for arguments greater than 0 (IronPython). 
- try: - type_.__module__ = sys._getframe(1).f_globals.get( - "__name__", "__main__" - ) - except (AttributeError, ValueError): - pass - - # We do it here for proper warnings with meaningful stacklevel. - cmp = attributes_arguments.pop("cmp", None) - attributes_arguments["eq"], attributes_arguments[ - "order" - ] = _determine_eq_order( - cmp, attributes_arguments.get("eq"), attributes_arguments.get("order") - ) - - return _attrs(these=cls_dict, **attributes_arguments)(type_) - - -# These are required by within this module so we define them here and merely -# import into .validators. - - -@attrs(slots=True, hash=True) -class _AndValidator(object): - """ - Compose many validators to a single one. - """ - - _validators = attrib() - - def __call__(self, inst, attr, value): - for v in self._validators: - v(inst, attr, value) - - -def and_(*validators): - """ - A validator that composes multiple validators into one. - - When called on a value, it runs all wrapped validators. - - :param validators: Arbitrary number of validators. - :type validators: callables - - .. versionadded:: 17.1.0 - """ - vals = [] - for validator in validators: - vals.extend( - validator._validators - if isinstance(validator, _AndValidator) - else [validator] - ) - - return _AndValidator(tuple(vals)) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attr/_version_info.py b/.tox/py37-normal/lib/python3.7/site-packages/attr/_version_info.py deleted file mode 100644 index 014e78a..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attr/_version_info.py +++ /dev/null @@ -1,85 +0,0 @@ -from __future__ import absolute_import, division, print_function - -from functools import total_ordering - -from ._funcs import astuple -from ._make import attrib, attrs - - -@total_ordering -@attrs(eq=False, order=False, slots=True, frozen=True) -class VersionInfo(object): - """ - A version object that can be compared to tuple of length 1--4: - - >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2) - True - >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1) - True - >>> vi = attr.VersionInfo(19, 2, 0, "final") - >>> vi < (19, 1, 1) - False - >>> vi < (19,) - False - >>> vi == (19, 2,) - True - >>> vi == (19, 2, 1) - False - - .. versionadded:: 19.2 - """ - - year = attrib(type=int) - minor = attrib(type=int) - micro = attrib(type=int) - releaselevel = attrib(type=str) - - @classmethod - def _from_version_string(cls, s): - """ - Parse *s* and return a _VersionInfo. - """ - v = s.split(".") - if len(v) == 3: - v.append("final") - - return cls( - year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3] - ) - - def _ensure_tuple(self, other): - """ - Ensure *other* is a tuple of a valid length. - - Returns a possibly transformed *other* and ourselves as a tuple of - the same length as *other*. - """ - - if self.__class__ is other.__class__: - other = astuple(other) - - if not isinstance(other, tuple): - raise NotImplementedError - - if not (1 <= len(other) <= 4): - raise NotImplementedError - - return astuple(self)[: len(other)], other - - def __eq__(self, other): - try: - us, them = self._ensure_tuple(other) - except NotImplementedError: - return NotImplemented - - return us == them - - def __lt__(self, other): - try: - us, them = self._ensure_tuple(other) - except NotImplementedError: - return NotImplemented - - # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't - # have to do anything special with releaselevel for now. 
- return us < them diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attr/_version_info.pyi b/.tox/py37-normal/lib/python3.7/site-packages/attr/_version_info.pyi deleted file mode 100644 index 45ced08..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attr/_version_info.pyi +++ /dev/null @@ -1,9 +0,0 @@ -class VersionInfo: - @property - def year(self) -> int: ... - @property - def minor(self) -> int: ... - @property - def micro(self) -> int: ... - @property - def releaselevel(self) -> str: ... diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attr/converters.py b/.tox/py37-normal/lib/python3.7/site-packages/attr/converters.py deleted file mode 100644 index 8592897..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attr/converters.py +++ /dev/null @@ -1,78 +0,0 @@ -""" -Commonly useful converters. -""" - -from __future__ import absolute_import, division, print_function - -from ._make import NOTHING, Factory - - -def optional(converter): - """ - A converter that allows an attribute to be optional. An optional attribute - is one which can be set to ``None``. - - :param callable converter: the converter that is used for non-``None`` - values. - - .. versionadded:: 17.1.0 - """ - - def optional_converter(val): - if val is None: - return None - return converter(val) - - return optional_converter - - -def default_if_none(default=NOTHING, factory=None): - """ - A converter that allows to replace ``None`` values by *default* or the - result of *factory*. - - :param default: Value to be used if ``None`` is passed. Passing an instance - of `attr.Factory` is supported, however the ``takes_self`` option - is *not*. - :param callable factory: A callable that takes not parameters whose result - is used if ``None`` is passed. - - :raises TypeError: If **neither** *default* or *factory* is passed. - :raises TypeError: If **both** *default* and *factory* are passed. - :raises ValueError: If an instance of `attr.Factory` is passed with - ``takes_self=True``. - - .. versionadded:: 18.2.0 - """ - if default is NOTHING and factory is None: - raise TypeError("Must pass either `default` or `factory`.") - - if default is not NOTHING and factory is not None: - raise TypeError( - "Must pass either `default` or `factory` but not both." - ) - - if factory is not None: - default = Factory(factory) - - if isinstance(default, Factory): - if default.takes_self: - raise ValueError( - "`takes_self` is not supported by default_if_none." - ) - - def default_if_none_converter(val): - if val is not None: - return val - - return default.factory() - - else: - - def default_if_none_converter(val): - if val is not None: - return val - - return default - - return default_if_none_converter diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attr/converters.pyi b/.tox/py37-normal/lib/python3.7/site-packages/attr/converters.pyi deleted file mode 100644 index 63b2a38..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attr/converters.pyi +++ /dev/null @@ -1,12 +0,0 @@ -from typing import TypeVar, Optional, Callable, overload -from . import _ConverterType - -_T = TypeVar("_T") - -def optional( - converter: _ConverterType[_T] -) -> _ConverterType[Optional[_T]]: ... -@overload -def default_if_none(default: _T) -> _ConverterType[_T]: ... -@overload -def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType[_T]: ... 
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attr/exceptions.py b/.tox/py37-normal/lib/python3.7/site-packages/attr/exceptions.py deleted file mode 100644 index d1b7618..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attr/exceptions.py +++ /dev/null @@ -1,74 +0,0 @@ -from __future__ import absolute_import, division, print_function - - -class FrozenInstanceError(AttributeError): - """ - A frozen/immutable instance has been attempted to be modified. - - It mirrors the behavior of ``namedtuples`` by using the same error message - and subclassing `AttributeError`. - - .. versionadded:: 16.1.0 - """ - - msg = "can't set attribute" - args = [msg] - - -class AttrsAttributeNotFoundError(ValueError): - """ - An ``attrs`` function couldn't find an attribute that the user asked for. - - .. versionadded:: 16.2.0 - """ - - -class NotAnAttrsClassError(ValueError): - """ - A non-``attrs`` class has been passed into an ``attrs`` function. - - .. versionadded:: 16.2.0 - """ - - -class DefaultAlreadySetError(RuntimeError): - """ - A default has been set using ``attr.ib()`` and is attempted to be reset - using the decorator. - - .. versionadded:: 17.1.0 - """ - - -class UnannotatedAttributeError(RuntimeError): - """ - A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type - annotation. - - .. versionadded:: 17.3.0 - """ - - -class PythonTooOldError(RuntimeError): - """ - An ``attrs`` feature requiring a more recent python version has been used. - - .. versionadded:: 18.2.0 - """ - - -class NotCallableError(TypeError): - """ - A ``attr.ib()`` requiring a callable has been set with a value - that is not callable. - - .. versionadded:: 19.2.0 - """ - - def __init__(self, msg, value): - super(TypeError, self).__init__(msg, value) - self.msg = msg - self.value = value - - def __str__(self): - return str(self.msg) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attr/exceptions.pyi b/.tox/py37-normal/lib/python3.7/site-packages/attr/exceptions.pyi deleted file mode 100644 index 736fde2..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attr/exceptions.pyi +++ /dev/null @@ -1,15 +0,0 @@ -from typing import Any - -class FrozenInstanceError(AttributeError): - msg: str = ... - -class AttrsAttributeNotFoundError(ValueError): ... -class NotAnAttrsClassError(ValueError): ... -class DefaultAlreadySetError(RuntimeError): ... -class UnannotatedAttributeError(RuntimeError): ... -class PythonTooOldError(RuntimeError): ... - -class NotCallableError(TypeError): - msg: str = ... - value: Any = ... - def __init__(self, msg: str, value: Any) -> None: ... diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attr/filters.py b/.tox/py37-normal/lib/python3.7/site-packages/attr/filters.py deleted file mode 100644 index dc47e8f..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attr/filters.py +++ /dev/null @@ -1,52 +0,0 @@ -""" -Commonly useful filters for `attr.asdict`. -""" - -from __future__ import absolute_import, division, print_function - -from ._compat import isclass -from ._make import Attribute - - -def _split_what(what): - """ - Returns a tuple of `frozenset`s of classes and attributes. - """ - return ( - frozenset(cls for cls in what if isclass(cls)), - frozenset(cls for cls in what if isinstance(cls, Attribute)), - ) - - -def include(*what): - """ - Whitelist *what*. - - :param what: What to whitelist. 
- :type what: `list` of `type` or `attr.Attribute`\\ s - - :rtype: `callable` - """ - cls, attrs = _split_what(what) - - def include_(attribute, value): - return value.__class__ in cls or attribute in attrs - - return include_ - - -def exclude(*what): - """ - Blacklist *what*. - - :param what: What to blacklist. - :type what: `list` of classes or `attr.Attribute`\\ s. - - :rtype: `callable` - """ - cls, attrs = _split_what(what) - - def exclude_(attribute, value): - return value.__class__ not in cls and attribute not in attrs - - return exclude_ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attr/filters.pyi b/.tox/py37-normal/lib/python3.7/site-packages/attr/filters.pyi deleted file mode 100644 index 68368fe..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attr/filters.pyi +++ /dev/null @@ -1,5 +0,0 @@ -from typing import Union, Any -from . import Attribute, _FilterType - -def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... -def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attr/py.typed b/.tox/py37-normal/lib/python3.7/site-packages/attr/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attr/validators.py b/.tox/py37-normal/lib/python3.7/site-packages/attr/validators.py deleted file mode 100644 index 839d310..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attr/validators.py +++ /dev/null @@ -1,378 +0,0 @@ -""" -Commonly useful validators. -""" - -from __future__ import absolute_import, division, print_function - -import re - -from ._make import _AndValidator, and_, attrib, attrs -from .exceptions import NotCallableError - - -__all__ = [ - "and_", - "deep_iterable", - "deep_mapping", - "in_", - "instance_of", - "is_callable", - "matches_re", - "optional", - "provides", -] - - -@attrs(repr=False, slots=True, hash=True) -class _InstanceOfValidator(object): - type = attrib() - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if not isinstance(value, self.type): - raise TypeError( - "'{name}' must be {type!r} (got {value!r} that is a " - "{actual!r}).".format( - name=attr.name, - type=self.type, - actual=value.__class__, - value=value, - ), - attr, - self.type, - value, - ) - - def __repr__(self): - return "".format( - type=self.type - ) - - -def instance_of(type): - """ - A validator that raises a `TypeError` if the initializer is called - with a wrong type for this particular attribute (checks are performed using - `isinstance` therefore it's also valid to pass a tuple of types). - - :param type: The type to check for. - :type type: type or tuple of types - - :raises TypeError: With a human readable error message, the attribute - (of type `attr.Attribute`), the expected type, and the value it - got. - """ - return _InstanceOfValidator(type) - - -@attrs(repr=False, frozen=True) -class _MatchesReValidator(object): - regex = attrib() - flags = attrib() - match_func = attrib() - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. 
- """ - if not self.match_func(value): - raise ValueError( - "'{name}' must match regex {regex!r}" - " ({value!r} doesn't)".format( - name=attr.name, regex=self.regex.pattern, value=value - ), - attr, - self.regex, - value, - ) - - def __repr__(self): - return "".format( - regex=self.regex - ) - - -def matches_re(regex, flags=0, func=None): - r""" - A validator that raises `ValueError` if the initializer is called - with a string that doesn't match *regex*. - - :param str regex: a regex string to match against - :param int flags: flags that will be passed to the underlying re function - (default 0) - :param callable func: which underlying `re` function to call (options - are `re.fullmatch`, `re.search`, `re.match`, default - is ``None`` which means either `re.fullmatch` or an emulation of - it on Python 2). For performance reasons, they won't be used directly - but on a pre-`re.compile`\ ed pattern. - - .. versionadded:: 19.2.0 - """ - fullmatch = getattr(re, "fullmatch", None) - valid_funcs = (fullmatch, None, re.search, re.match) - if func not in valid_funcs: - raise ValueError( - "'func' must be one of %s." - % ( - ", ".join( - sorted( - e and e.__name__ or "None" for e in set(valid_funcs) - ) - ), - ) - ) - - pattern = re.compile(regex, flags) - if func is re.match: - match_func = pattern.match - elif func is re.search: - match_func = pattern.search - else: - if fullmatch: - match_func = pattern.fullmatch - else: - pattern = re.compile(r"(?:{})\Z".format(regex), flags) - match_func = pattern.match - - return _MatchesReValidator(pattern, flags, match_func) - - -@attrs(repr=False, slots=True, hash=True) -class _ProvidesValidator(object): - interface = attrib() - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if not self.interface.providedBy(value): - raise TypeError( - "'{name}' must provide {interface!r} which {value!r} " - "doesn't.".format( - name=attr.name, interface=self.interface, value=value - ), - attr, - self.interface, - value, - ) - - def __repr__(self): - return "".format( - interface=self.interface - ) - - -def provides(interface): - """ - A validator that raises a `TypeError` if the initializer is called - with an object that does not provide the requested *interface* (checks are - performed using ``interface.providedBy(value)`` (see `zope.interface - `_). - - :param zope.interface.Interface interface: The interface to check for. - - :raises TypeError: With a human readable error message, the attribute - (of type `attr.Attribute`), the expected interface, and the - value it got. - """ - return _ProvidesValidator(interface) - - -@attrs(repr=False, slots=True, hash=True) -class _OptionalValidator(object): - validator = attrib() - - def __call__(self, inst, attr, value): - if value is None: - return - - self.validator(inst, attr, value) - - def __repr__(self): - return "".format( - what=repr(self.validator) - ) - - -def optional(validator): - """ - A validator that makes an attribute optional. An optional attribute is one - which can be set to ``None`` in addition to satisfying the requirements of - the sub-validator. - - :param validator: A validator (or a list of validators) that is used for - non-``None`` values. - :type validator: callable or `list` of callables. - - .. versionadded:: 15.1.0 - .. versionchanged:: 17.1.0 *validator* can be a list of validators. 
- """ - if isinstance(validator, list): - return _OptionalValidator(_AndValidator(validator)) - return _OptionalValidator(validator) - - -@attrs(repr=False, slots=True, hash=True) -class _InValidator(object): - options = attrib() - - def __call__(self, inst, attr, value): - try: - in_options = value in self.options - except TypeError: # e.g. `1 in "abc"` - in_options = False - - if not in_options: - raise ValueError( - "'{name}' must be in {options!r} (got {value!r})".format( - name=attr.name, options=self.options, value=value - ) - ) - - def __repr__(self): - return "".format( - options=self.options - ) - - -def in_(options): - """ - A validator that raises a `ValueError` if the initializer is called - with a value that does not belong in the options provided. The check is - performed using ``value in options``. - - :param options: Allowed options. - :type options: list, tuple, `enum.Enum`, ... - - :raises ValueError: With a human readable error message, the attribute (of - type `attr.Attribute`), the expected options, and the value it - got. - - .. versionadded:: 17.1.0 - """ - return _InValidator(options) - - -@attrs(repr=False, slots=False, hash=True) -class _IsCallableValidator(object): - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if not callable(value): - message = ( - "'{name}' must be callable " - "(got {value!r} that is a {actual!r})." - ) - raise NotCallableError( - msg=message.format( - name=attr.name, value=value, actual=value.__class__ - ), - value=value, - ) - - def __repr__(self): - return "" - - -def is_callable(): - """ - A validator that raises a `attr.exceptions.NotCallableError` if the - initializer is called with a value for this particular attribute - that is not callable. - - .. versionadded:: 19.1.0 - - :raises `attr.exceptions.NotCallableError`: With a human readable error - message containing the attribute (`attr.Attribute`) name, - and the value it got. - """ - return _IsCallableValidator() - - -@attrs(repr=False, slots=True, hash=True) -class _DeepIterable(object): - member_validator = attrib(validator=is_callable()) - iterable_validator = attrib( - default=None, validator=optional(is_callable()) - ) - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if self.iterable_validator is not None: - self.iterable_validator(inst, attr, value) - - for member in value: - self.member_validator(inst, attr, member) - - def __repr__(self): - iterable_identifier = ( - "" - if self.iterable_validator is None - else " {iterable!r}".format(iterable=self.iterable_validator) - ) - return ( - "" - ).format( - iterable_identifier=iterable_identifier, - member=self.member_validator, - ) - - -def deep_iterable(member_validator, iterable_validator=None): - """ - A validator that performs deep validation of an iterable. - - :param member_validator: Validator to apply to iterable members - :param iterable_validator: Validator to apply to iterable itself - (optional) - - .. 
versionadded:: 19.1.0 - - :raises TypeError: if any sub-validators fail - """ - return _DeepIterable(member_validator, iterable_validator) - - -@attrs(repr=False, slots=True, hash=True) -class _DeepMapping(object): - key_validator = attrib(validator=is_callable()) - value_validator = attrib(validator=is_callable()) - mapping_validator = attrib(default=None, validator=optional(is_callable())) - - def __call__(self, inst, attr, value): - """ - We use a callable class to be able to change the ``__repr__``. - """ - if self.mapping_validator is not None: - self.mapping_validator(inst, attr, value) - - for key in value: - self.key_validator(inst, attr, key) - self.value_validator(inst, attr, value[key]) - - def __repr__(self): - return ( - "" - ).format(key=self.key_validator, value=self.value_validator) - - -def deep_mapping(key_validator, value_validator, mapping_validator=None): - """ - A validator that performs deep validation of a dictionary. - - :param key_validator: Validator to apply to dictionary keys - :param value_validator: Validator to apply to dictionary values - :param mapping_validator: Validator to apply to top-level mapping - attribute (optional) - - .. versionadded:: 19.1.0 - - :raises TypeError: if any sub-validators fail - """ - return _DeepMapping(key_validator, value_validator, mapping_validator) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attr/validators.pyi b/.tox/py37-normal/lib/python3.7/site-packages/attr/validators.pyi deleted file mode 100644 index 9a22abb..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attr/validators.pyi +++ /dev/null @@ -1,66 +0,0 @@ -from typing import ( - Container, - List, - Union, - TypeVar, - Type, - Any, - Optional, - Tuple, - Iterable, - Mapping, - Callable, - Match, - AnyStr, - overload, -) -from . import _ValidatorType - -_T = TypeVar("_T") -_T1 = TypeVar("_T1") -_T2 = TypeVar("_T2") -_T3 = TypeVar("_T3") -_I = TypeVar("_I", bound=Iterable) -_K = TypeVar("_K") -_V = TypeVar("_V") -_M = TypeVar("_M", bound=Mapping) - -# To be more precise on instance_of use some overloads. -# If there are more than 3 items in the tuple then we fall back to Any -@overload -def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ... -@overload -def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ... -@overload -def instance_of( - type: Tuple[Type[_T1], Type[_T2]] -) -> _ValidatorType[Union[_T1, _T2]]: ... -@overload -def instance_of( - type: Tuple[Type[_T1], Type[_T2], Type[_T3]] -) -> _ValidatorType[Union[_T1, _T2, _T3]]: ... -@overload -def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ... -def provides(interface: Any) -> _ValidatorType[Any]: ... -def optional( - validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]] -) -> _ValidatorType[Optional[_T]]: ... -def in_(options: Container[_T]) -> _ValidatorType[_T]: ... -def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... -def matches_re( - regex: AnyStr, - flags: int = ..., - func: Optional[ - Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]] - ] = ..., -) -> _ValidatorType[AnyStr]: ... -def deep_iterable( - member_validator: _ValidatorType[_T], - iterable_validator: Optional[_ValidatorType[_I]] = ..., -) -> _ValidatorType[_I]: ... -def deep_mapping( - key_validator: _ValidatorType[_K], - value_validator: _ValidatorType[_V], - mapping_validator: Optional[_ValidatorType[_M]] = ..., -) -> _ValidatorType[_M]: ... -def is_callable() -> _ValidatorType[_T]: ... 
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attrs-19.3.0.dist-info/INSTALLER b/.tox/py37-normal/lib/python3.7/site-packages/attrs-19.3.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attrs-19.3.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attrs-19.3.0.dist-info/LICENSE b/.tox/py37-normal/lib/python3.7/site-packages/attrs-19.3.0.dist-info/LICENSE deleted file mode 100644 index 7ae3df9..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attrs-19.3.0.dist-info/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 Hynek Schlawack - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attrs-19.3.0.dist-info/METADATA b/.tox/py37-normal/lib/python3.7/site-packages/attrs-19.3.0.dist-info/METADATA deleted file mode 100644 index a106415..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attrs-19.3.0.dist-info/METADATA +++ /dev/null @@ -1,229 +0,0 @@ -Metadata-Version: 2.1 -Name: attrs -Version: 19.3.0 -Summary: Classes Without Boilerplate -Home-page: https://www.attrs.org/ -Author: Hynek Schlawack -Author-email: hs@ox.cx -Maintainer: Hynek Schlawack -Maintainer-email: hs@ox.cx -License: MIT -Project-URL: Documentation, https://www.attrs.org/ -Project-URL: Bug Tracker, https://github.com/python-attrs/attrs/issues -Project-URL: Source Code, https://github.com/python-attrs/attrs -Keywords: class,attribute,boilerplate -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Natural Language :: English -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, 
!=3.3.* -Description-Content-Type: text/x-rst -Provides-Extra: azure-pipelines -Requires-Dist: coverage ; extra == 'azure-pipelines' -Requires-Dist: hypothesis ; extra == 'azure-pipelines' -Requires-Dist: pympler ; extra == 'azure-pipelines' -Requires-Dist: pytest (>=4.3.0) ; extra == 'azure-pipelines' -Requires-Dist: six ; extra == 'azure-pipelines' -Requires-Dist: zope.interface ; extra == 'azure-pipelines' -Requires-Dist: pytest-azurepipelines ; extra == 'azure-pipelines' -Provides-Extra: dev -Requires-Dist: coverage ; extra == 'dev' -Requires-Dist: hypothesis ; extra == 'dev' -Requires-Dist: pympler ; extra == 'dev' -Requires-Dist: pytest (>=4.3.0) ; extra == 'dev' -Requires-Dist: six ; extra == 'dev' -Requires-Dist: zope.interface ; extra == 'dev' -Requires-Dist: sphinx ; extra == 'dev' -Requires-Dist: pre-commit ; extra == 'dev' -Provides-Extra: docs -Requires-Dist: sphinx ; extra == 'docs' -Requires-Dist: zope.interface ; extra == 'docs' -Provides-Extra: tests -Requires-Dist: coverage ; extra == 'tests' -Requires-Dist: hypothesis ; extra == 'tests' -Requires-Dist: pympler ; extra == 'tests' -Requires-Dist: pytest (>=4.3.0) ; extra == 'tests' -Requires-Dist: six ; extra == 'tests' -Requires-Dist: zope.interface ; extra == 'tests' - -.. image:: https://www.attrs.org/en/latest/_static/attrs_logo.png - :alt: attrs Logo - -====================================== -``attrs``: Classes Without Boilerplate -====================================== - -.. image:: https://readthedocs.org/projects/attrs/badge/?version=stable - :target: https://www.attrs.org/en/stable/?badge=stable - :alt: Documentation Status - -.. image:: https://attrs.visualstudio.com/attrs/_apis/build/status/python-attrs.attrs?branchName=master - :target: https://attrs.visualstudio.com/attrs/_build/latest?definitionId=1&branchName=master - :alt: CI Status - -.. image:: https://codecov.io/github/python-attrs/attrs/branch/master/graph/badge.svg - :target: https://codecov.io/github/python-attrs/attrs - :alt: Test Coverage - -.. image:: https://img.shields.io/badge/code%20style-black-000000.svg - :target: https://github.com/psf/black - :alt: Code style: black - -.. teaser-begin - -``attrs`` is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka `dunder `_ methods). - -Its main goal is to help you to write **concise** and **correct** software without slowing down your code. - -.. -spiel-end- - -For that, it gives you a class decorator and a way to declaratively define the attributes on that class: - -.. -code-begin- - -.. code-block:: pycon - - >>> import attr - - >>> @attr.s - ... class SomeClass(object): - ... a_number = attr.ib(default=42) - ... list_of_numbers = attr.ib(factory=list) - ... - ... def hard_math(self, another_number): - ... 
return self.a_number + sum(self.list_of_numbers) * another_number - - - >>> sc = SomeClass(1, [1, 2, 3]) - >>> sc - SomeClass(a_number=1, list_of_numbers=[1, 2, 3]) - - >>> sc.hard_math(3) - 19 - >>> sc == SomeClass(1, [1, 2, 3]) - True - >>> sc != SomeClass(2, [3, 2, 1]) - True - - >>> attr.asdict(sc) - {'a_number': 1, 'list_of_numbers': [1, 2, 3]} - - >>> SomeClass() - SomeClass(a_number=42, list_of_numbers=[]) - - >>> C = attr.make_class("C", ["a", "b"]) - >>> C("foo", "bar") - C(a='foo', b='bar') - - -After *declaring* your attributes ``attrs`` gives you: - -- a concise and explicit overview of the class's attributes, -- a nice human-readable ``__repr__``, -- a complete set of comparison methods (equality and ordering), -- an initializer, -- and much more, - -*without* writing dull boilerplate code again and again and *without* runtime performance penalties. - -On Python 3.6 and later, you can often even drop the calls to ``attr.ib()`` by using `type annotations `_. - -This gives you the power to use actual classes with actual types in your code instead of confusing ``tuple``\ s or `confusingly behaving `_ ``namedtuple``\ s. -Which in turn encourages you to write *small classes* that do `one thing well `_. -Never again violate the `single responsibility principle `_ just because implementing ``__init__`` et al is a painful drag. - - -.. -testimonials- - -Testimonials -============ - -**Amber Hawkie Brown**, Twisted Release Manager and Computer Owl: - - Writing a fully-functional class using attrs takes me less time than writing this testimonial. - - -**Glyph Lefkowitz**, creator of `Twisted `_, `Automat `_, and other open source software, in `The One Python Library Everyone Needs `_: - - I’m looking forward to is being able to program in Python-with-attrs everywhere. - It exerts a subtle, but positive, design influence in all the codebases I’ve see it used in. - - -**Kenneth Reitz**, creator of `Requests `_ (`on paper no less `_!): - - attrs—classes for humans. I like it. - - -**Łukasz Langa**, creator of `Black `_, prolific Python core developer, and release manager for Python 3.8 and 3.9: - - I'm increasingly digging your attr.ocity. Good job! - - -.. -end- - -.. -project-information- - -Getting Help -============ - -Please use the ``python-attrs`` tag on `StackOverflow `_ to get help. - -Answering questions of your fellow developers is also great way to help the project! - - -Project Information -=================== - -``attrs`` is released under the `MIT `_ license, -its documentation lives at `Read the Docs `_, -the code on `GitHub `_, -and the latest release on `PyPI `_. -It’s rigorously tested on Python 2.7, 3.4+, and PyPy. - -We collect information on **third-party extensions** in our `wiki `_. -Feel free to browse and add your own! - -If you'd like to contribute to ``attrs`` you're most welcome and we've written `a little guide `_ to get you started! - - -Release Information -=================== - -19.3.0 (2019-10-15) -------------------- - -Changes -^^^^^^^ - -- Fixed ``auto_attribs`` usage when default values cannot be compared directly with ``==``, such as ``numpy`` arrays. - `#585 `_ - -`Full changelog `_. - -Credits -======= - -``attrs`` is written and maintained by `Hynek Schlawack `_. - -The development is kindly supported by `Variomedia AG `_. - -A full list of contributors can be found in `GitHub's overview `_. - -It’s the spiritual successor of `characteristic `_ and aspires to fix some of it clunkiness and unfortunate decisions. 
-Both were inspired by Twisted’s `FancyEqMixin `_ but both are implemented using class decorators because `subclassing is bad for you `_, m’kay? - - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attrs-19.3.0.dist-info/RECORD b/.tox/py37-normal/lib/python3.7/site-packages/attrs-19.3.0.dist-info/RECORD deleted file mode 100644 index c9acc4a..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attrs-19.3.0.dist-info/RECORD +++ /dev/null @@ -1,33 +0,0 @@ -attr/__init__.py,sha256=ONaI-ZEGOIC7IDqH2HANgesnOxPE1m0GIRRYPPsXEHk,1349 -attr/__init__.pyi,sha256=fOnMRTF00b5J23PYPF74u66UVhVzzm0KYVxzmVXHPw0,8257 -attr/__pycache__/__init__.cpython-37.pyc,, -attr/__pycache__/_compat.cpython-37.pyc,, -attr/__pycache__/_config.cpython-37.pyc,, -attr/__pycache__/_funcs.cpython-37.pyc,, -attr/__pycache__/_make.cpython-37.pyc,, -attr/__pycache__/_version_info.cpython-37.pyc,, -attr/__pycache__/converters.cpython-37.pyc,, -attr/__pycache__/exceptions.cpython-37.pyc,, -attr/__pycache__/filters.cpython-37.pyc,, -attr/__pycache__/validators.cpython-37.pyc,, -attr/_compat.py,sha256=-pJtdtqgCg0K6rH_BWf3wKuTum58GD-WWPclQQ2SUaU,7326 -attr/_config.py,sha256=_KvW0mQdH2PYjHc0YfIUaV_o2pVfM7ziMEYTxwmEhOA,514 -attr/_funcs.py,sha256=unAJfNGSTOzxyFzkj7Rs3O1bfsQodmXyir9uZKen-vY,9696 -attr/_make.py,sha256=HhjGhFEbnxPKuUb9hFmAjXoQGpekniw1IEF3_Z-vwCc,70807 -attr/_version_info.py,sha256=azMi1lNelb3cJvvYUMXsXVbUANkRzbD5IEiaXVpeVr4,2162 -attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209 -attr/converters.py,sha256=5QJRYSXE8G7PW0289y_SPwvvZIcw-nJIuBlfYVdB4BQ,2141 -attr/converters.pyi,sha256=wAhCoOT1MFV8t323rpD87O7bxQ8CYLTPiBQd-29BieI,351 -attr/exceptions.py,sha256=hbhOa3b4W8_mRrbj3FsMTR4Bt5xzbJs5xaFTWn8s6h4,1635 -attr/exceptions.pyi,sha256=4zuaJyl2axxWbqnZgxo_2oTpPNbyowEw3A4hqV5PmAc,458 -attr/filters.py,sha256=weDxwATsa69T_0bPVjiM1fGsciAMQmwhY5G8Jm5BxuI,1098 -attr/filters.pyi,sha256=xDpmKQlFdssgxGa5tsl1ADh_3zwAwAT4vUhd8h-8-Tk,214 -attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -attr/validators.py,sha256=8AsxgdDgh3sGPseiUIMPGcTr6PvaDYfH3AK46tsvs8U,11460 -attr/validators.pyi,sha256=vZgsJqUwrJevh4v_Hd7_RSXqDrBctE6-3AEZ7uYKodo,1868 -attrs-19.3.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -attrs-19.3.0.dist-info/LICENSE,sha256=v2WaKLSSQGAvVrvfSQy-LsUJsVuY-Z17GaUsdA4yeGM,1082 -attrs-19.3.0.dist-info/METADATA,sha256=WmnjYy_TftebL3pewXyGEaD4TZRrLUEHk3frEkAtqL0,9022 -attrs-19.3.0.dist-info/RECORD,, -attrs-19.3.0.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110 -attrs-19.3.0.dist-info/top_level.txt,sha256=tlRYMddkRlKPqJ96wP2_j9uEsmcNHgD2SbuWd4CzGVU,5 diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attrs-19.3.0.dist-info/WHEEL b/.tox/py37-normal/lib/python3.7/site-packages/attrs-19.3.0.dist-info/WHEEL deleted file mode 100644 index 8b701e9..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attrs-19.3.0.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.6) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/attrs-19.3.0.dist-info/top_level.txt b/.tox/py37-normal/lib/python3.7/site-packages/attrs-19.3.0.dist-info/top_level.txt deleted file mode 100644 index 66a062d..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/attrs-19.3.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -attr diff --git a/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/DESCRIPTION.rst 
b/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/DESCRIPTION.rst deleted file mode 100644 index 0b0953d..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/DESCRIPTION.rst +++ /dev/null @@ -1,50 +0,0 @@ -Certifi: Python SSL Certificates -================================ - -`Certifi`_ is a carefully curated collection of Root Certificates for -validating the trustworthiness of SSL certificates while verifying the identity -of TLS hosts. It has been extracted from the `Requests`_ project. - -Installation ------------- - -``certifi`` is available on PyPI. Simply install it with ``pip``:: - - $ pip install certifi - -Usage ------ - -To reference the installed certificate authority (CA) bundle, you can use the -built-in function:: - - >>> import certifi - - >>> certifi.where() - '/usr/local/lib/python2.7/site-packages/certifi/cacert.pem' - -Or from the command line:: - - $ python -m certifi - /usr/local/lib/python2.7/site-packages/certifi/cacert.pem - -Enjoy! - -1024-bit Root Certificates -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Browsers and certificate authorities have concluded that 1024-bit keys are -unacceptably weak for certificates, particularly root certificates. For this -reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its -bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key) -certificate from the same CA. Because Mozilla removed these certificates from -its bundle, ``certifi`` removed them as well. - -In previous versions, ``certifi`` provided the ``certifi.old_where()`` function -to intentionally re-add the 1024-bit roots back into your bundle. This was not -recommended in production and therefore was removed at the end of 2018. - -.. _`Certifi`: https://certifi.io/en/latest/ -.. _`Requests`: http://docs.python-requests.org/en/latest/ - - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/INSTALLER b/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/METADATA b/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/METADATA deleted file mode 100644 index 6ecc70c..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/METADATA +++ /dev/null @@ -1,74 +0,0 @@ -Metadata-Version: 2.0 -Name: certifi -Version: 2019.9.11 -Summary: Python package for providing Mozilla's CA Bundle. 
-Home-page: https://certifi.io/ -Author: Kenneth Reitz -Author-email: me@kennethreitz.com -License: MPL-2.0 -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) -Classifier: Natural Language :: English -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 - -Certifi: Python SSL Certificates -================================ - -`Certifi`_ is a carefully curated collection of Root Certificates for -validating the trustworthiness of SSL certificates while verifying the identity -of TLS hosts. It has been extracted from the `Requests`_ project. - -Installation ------------- - -``certifi`` is available on PyPI. Simply install it with ``pip``:: - - $ pip install certifi - -Usage ------ - -To reference the installed certificate authority (CA) bundle, you can use the -built-in function:: - - >>> import certifi - - >>> certifi.where() - '/usr/local/lib/python2.7/site-packages/certifi/cacert.pem' - -Or from the command line:: - - $ python -m certifi - /usr/local/lib/python2.7/site-packages/certifi/cacert.pem - -Enjoy! - -1024-bit Root Certificates -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Browsers and certificate authorities have concluded that 1024-bit keys are -unacceptably weak for certificates, particularly root certificates. For this -reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its -bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key) -certificate from the same CA. Because Mozilla removed these certificates from -its bundle, ``certifi`` removed them as well. - -In previous versions, ``certifi`` provided the ``certifi.old_where()`` function -to intentionally re-add the 1024-bit roots back into your bundle. This was not -recommended in production and therefore was removed at the end of 2018. - -.. _`Certifi`: https://certifi.io/en/latest/ -.. 
_`Requests`: http://docs.python-requests.org/en/latest/ - - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/RECORD b/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/RECORD deleted file mode 100644 index 83ea0cc..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/RECORD +++ /dev/null @@ -1,14 +0,0 @@ -certifi-2019.9.11.dist-info/DESCRIPTION.rst,sha256=aLNHONztn2ZiBpSTivVFy6EDIWmuNYSsEQwx4NWbvB4,1580 -certifi-2019.9.11.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -certifi-2019.9.11.dist-info/METADATA,sha256=M0Gen7rhgJKIvggZuENXqcZexg74gXdjGC679bMeoDw,2522 -certifi-2019.9.11.dist-info/RECORD,, -certifi-2019.9.11.dist-info/WHEEL,sha256=5wvfB7GvgZAbKBSE9uX9Zbi6LCL-_KgezgHblXhCRnM,113 -certifi-2019.9.11.dist-info/metadata.json,sha256=NppG2TtVr6va5nwyG9pxGhktdvudS-IfpTkaQaWKlBE,1022 -certifi-2019.9.11.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 -certifi/__init__.py,sha256=WFoavXHhpX-BZ5kbvyinZTbhLsqPJypLKIZu29nUsQg,52 -certifi/__main__.py,sha256=FiOYt1Fltst7wk9DRa6GCoBr8qBUxlNQu_MKJf04E6s,41 -certifi/__pycache__/__init__.cpython-37.pyc,, -certifi/__pycache__/__main__.cpython-37.pyc,, -certifi/__pycache__/core.cpython-37.pyc,, -certifi/cacert.pem,sha256=cVC1b0T-OcQzgdcRql2yMxT7O08O6pcJHnuO9nbLLn0,278533 -certifi/core.py,sha256=EuFc2BsToG5O1-qsx4BSjQ1r1-7WRtH87b1WflZOWhI,218 diff --git a/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/WHEEL b/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/WHEEL deleted file mode 100644 index 7bf9daa..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.30.0.a0) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/metadata.json b/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/metadata.json deleted file mode 100644 index 7f155f5..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)", "Natural Language :: English", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7"], "extensions": {"python.details": {"contacts": [{"email": "me@kennethreitz.com", "name": "Kenneth Reitz", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://certifi.io/"}}}, "generator": "bdist_wheel (0.30.0.a0)", "license": "MPL-2.0", "metadata_version": "2.0", "name": "certifi", "summary": "Python package for providing Mozilla's CA Bundle.", "version": "2019.9.11"} \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/top_level.txt b/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/top_level.txt deleted file mode 100644 index 963eac5..0000000 --- 
a/.tox/py37-normal/lib/python3.7/site-packages/certifi-2019.9.11.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -certifi diff --git a/.tox/py37-normal/lib/python3.7/site-packages/certifi/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/certifi/__init__.py deleted file mode 100644 index 8e358e4..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/certifi/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .core import where - -__version__ = "2019.09.11" diff --git a/.tox/py37-normal/lib/python3.7/site-packages/certifi/__main__.py b/.tox/py37-normal/lib/python3.7/site-packages/certifi/__main__.py deleted file mode 100644 index 5f1da0d..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/certifi/__main__.py +++ /dev/null @@ -1,2 +0,0 @@ -from certifi import where -print(where()) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/certifi/cacert.pem b/.tox/py37-normal/lib/python3.7/site-packages/certifi/cacert.pem deleted file mode 100644 index 70fa91f..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/certifi/cacert.pem +++ /dev/null @@ -1,4558 +0,0 @@ - -# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Label: "GlobalSign Root CA" -# Serial: 4835703278459707669005204 -# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a -# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c -# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 ------BEGIN CERTIFICATE----- -MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG -A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv -b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw -MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i -YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT -aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ -jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp -xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp -1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG -snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ -U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 -9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B -AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz -yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE -38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP -AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad -DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME -HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Label: "GlobalSign Root CA - R2" -# Serial: 4835703278459682885658125 -# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 -# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe -# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e ------BEGIN CERTIFICATE----- -MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 
-MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL -v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 -eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq -tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd -C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa -zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB -mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH -V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n -bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG -3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs -J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO -291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS -ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd -AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 -TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Label: "Verisign Class 3 Public Primary Certification Authority - G3" -# Serial: 206684696279472310254277870180966723415 -# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 -# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 -# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu -LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT -aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b -N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t -KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu -kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm -CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ -Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu -imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te -2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe -DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC -/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p -F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt -TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== ------END CERTIFICATE----- - -# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Premium 2048 Secure Server CA" -# Serial: 946069240 -# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 -# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 -# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML -RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp -bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 -IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 -MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 -LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp -YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG -A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq -K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe -sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX -MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT -XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ -HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH -4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub -j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo -U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf -zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b -u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ -bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er -fF6adulZkMV8gzURZVE= ------END CERTIFICATE----- - -# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Label: "Baltimore CyberTrust Root" -# Serial: 33554617 -# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 -# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 -# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ -RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD -VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX -DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y -ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy -VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr -mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr -IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK -mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu -XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy -dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye -jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 -BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 -DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 
-9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx -jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 -Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz -ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS -R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp ------END CERTIFICATE----- - -# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Label: "AddTrust External Root" -# Serial: 1 -# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f -# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 -# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 ------BEGIN CERTIFICATE----- -MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs -IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 -MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux -FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h -bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v -dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt -H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 -uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX -mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX -a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN -E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 -WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD -VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 -Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU -cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx -IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN -AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH -YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 -6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC -Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX -c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a -mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. -# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
-# Label: "Entrust Root Certification Authority" -# Serial: 1164660820 -# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 -# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 -# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c ------BEGIN CERTIFICATE----- -MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 -Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW -KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl -cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw -NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw -NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy -ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV -BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ -KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo -Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 -4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 -KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI -rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi -94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB -sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi -gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo -kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE -vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA -A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t -O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua -AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP -9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ -eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m -0vdXcDazv/wor3ElhVsT/h5/WrQ8 ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. 
-# Label: "GeoTrust Global CA" -# Serial: 144470 -# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 -# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 -# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a ------BEGIN CERTIFICATE----- -MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT -MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i -YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG -EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg -R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 -9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq -fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv -iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU -1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ -bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW -MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA -ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l -uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn -Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS -tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF -PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un -hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV -5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. -# Label: "GeoTrust Universal CA" -# Serial: 1 -# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 -# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 -# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 ------BEGIN CERTIFICATE----- -MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy -c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE -BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 -IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV -VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 -cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT -QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh -F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v -c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w -mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd -VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX -teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ -f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe -Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ -nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB -/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY -MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG -9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc -aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX -IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn -ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z -uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN 
-Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja -QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW -koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 -ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt -DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm -bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Label: "GeoTrust Universal CA 2" -# Serial: 1 -# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 -# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 -# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b ------BEGIN CERTIFICATE----- -MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy -c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD -VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 -c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 -WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG -FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq -XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL -se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb -KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd -IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 -y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt -hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc -QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 -Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV -HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ -KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z -dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ -L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr -Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo -ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY -T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz -GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m -1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV -OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH -6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX -QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS ------END CERTIFICATE----- - -# Issuer: CN=AAA Certificate Services O=Comodo CA Limited -# Subject: CN=AAA Certificate Services O=Comodo CA Limited -# Label: "Comodo AAA Services root" -# Serial: 1 -# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 -# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 -# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 ------BEGIN CERTIFICATE----- -MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj -YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL 
-MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM -GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua -BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe -3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 -YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR -rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm -ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU -oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF -MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v -QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t -b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF -AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q -GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz -Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 -G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi -l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 -smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority -# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority -# Label: "QuoVadis Root CA" -# Serial: 985026699 -# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24 -# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9 -# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73 ------BEGIN CERTIFICATE----- -MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0 -aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0 -aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz -MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw -IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR -dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp -li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D -rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ -WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug -F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU -xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC -Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv -dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw -ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl -IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh -c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy -ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh -Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI -KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T -KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq -y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p -dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD -VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL -MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk 
-fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8 -7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R -cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y -mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW -xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK -SnQ2+Q== ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2" -# Serial: 1289 -# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b -# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 -# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 ------BEGIN CERTIFICATE----- -MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa -GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg -Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J -WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB -rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp -+ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 -ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i -Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz -PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og -/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH -oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI -yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud -EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 -A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL -MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT -ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f -BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn -g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl -fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K -WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha -B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc -hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR -TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD -mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z -ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y -4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza -8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3" -# Serial: 1478 -# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf -# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 -# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 ------BEGIN CERTIFICATE----- -MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV 
-BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM -V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB -4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr -H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd -8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv -vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT -mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe -btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc -T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt -WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ -c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A -4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD -VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG -CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 -aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 -aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu -dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw -czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G -A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg -Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 -7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem -d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd -+LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B -4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN -t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x -DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 -k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s -zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j -Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT -mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK -4SVhM7JZG+Ju1zdXtg2pEto= ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 -# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 -# Label: "Security Communication Root CA" -# Serial: 0 -# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a -# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 -# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c ------BEGIN CERTIFICATE----- -MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY -MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t -dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 -WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD -VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 -9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ -DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 -Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N -QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ -xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G -A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG 
-kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr -Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 -Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU -JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot -RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== ------END CERTIFICATE----- - -# Issuer: CN=Sonera Class2 CA O=Sonera -# Subject: CN=Sonera Class2 CA O=Sonera -# Label: "Sonera Class 2 Root CA" -# Serial: 29 -# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb -# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27 -# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27 ------BEGIN CERTIFICATE----- -MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP -MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx -MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV -BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o -Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt -5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s -3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej -vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu -8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw -DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG -MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil -zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/ -3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD -FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6 -Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2 -ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M ------END CERTIFICATE----- - -# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Label: "XRamp Global CA Root" -# Serial: 107108908803651509692980124233745014957 -# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 -# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 -# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB -gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk -MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY -UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx -NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 -dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy -dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 -38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP -KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q -DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 -qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa -JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi -PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P -BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs 
-jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 -eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD -ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR -vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt -qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa -IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy -i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ -O+7ETPTsJ3xCwnR8gooJybQDJbw= ------END CERTIFICATE----- - -# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Label: "Go Daddy Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 -# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 -# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 ------BEGIN CERTIFICATE----- -MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh -MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE -YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 -MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo -ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg -MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN -ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA -PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w -wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi -EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY -avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ -YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE -sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h -/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 -IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD -ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy -OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P -TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ -HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER -dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf -ReYNnyicsbkqWletNw+vHX/bvZ8= ------END CERTIFICATE----- - -# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority -# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority -# Label: "Starfield Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 -# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a -# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 ------BEGIN CERTIFICATE----- -MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl -MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp -U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw -NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE -ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp -ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 -DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf -8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN -+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 -X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa -K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA -1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G -A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR -zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 -YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD -bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w -DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 -L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D -eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl -xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp -VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY -WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= ------END CERTIFICATE----- - -# Issuer: O=Government Root Certification Authority -# Subject: O=Government Root Certification Authority -# Label: "Taiwan GRCA" -# Serial: 42023070807708724159991140556527066870 -# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e -# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9 -# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3 ------BEGIN CERTIFICATE----- -MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/ -MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow -PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB -AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR -IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q -gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy -yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts -F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2 -jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx -ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC -VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK -YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH -EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN -Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud -DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE -MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK 
-UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ -TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf -qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK -ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE -JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7 -hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1 -EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm -nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX -udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz -ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe -LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl -pYYsfPQS ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root CA" -# Serial: 17154717934120587862167794914071425081 -# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 -# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 -# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c ------BEGIN CERTIFICATE----- -MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c -JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP -mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ -wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 -VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ -AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB -AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun -pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC -dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf -fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm -NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx -H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe -+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root CA" -# Serial: 10944719598952040374951832963794454346 -# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e -# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 -# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 ------BEGIN CERTIFICATE----- -MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD -QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j 
-b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB -CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 -nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt -43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P -T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 -gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR -TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw -DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr -hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg -06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF -PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls -YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk -CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert High Assurance EV Root CA" -# Serial: 3553400076410547919724730734378100087 -# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a -# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 -# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j -ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 -LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug -RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm -+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW -PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM -xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB -Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 -hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg -EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA -FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec -nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z -eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF -hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 -Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe -vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep -+OkuE6N36B9K ------END CERTIFICATE----- - -# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. -# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. 
-# Label: "DST Root CA X3" -# Serial: 91299735575339953335919266965803778155 -# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 -# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 -# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 ------BEGIN CERTIFICATE----- -MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ -MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT -DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow -PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD -Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB -AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O -rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq -OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b -xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw -7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD -aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG -SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 -ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr -AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz -R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 -JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo -Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Label: "SwissSign Gold CA - G2" -# Serial: 13492815561806991280 -# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 -# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 -# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 ------BEGIN CERTIFICATE----- -MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV -BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln -biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF -MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT -d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 -76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ -bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c -6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE -emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd -MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt -MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y -MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y -FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi -aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM -gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB -qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 -lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn -8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov -L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 -45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO -UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 -O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC 
-bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv -GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a -77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC -hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 -92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp -Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w -ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt -Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Label: "SwissSign Silver CA - G2" -# Serial: 5700383053117599563 -# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 -# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb -# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 ------BEGIN CERTIFICATE----- -MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE -BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu -IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow -RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY -U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A -MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv -Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br -YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF -nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH -6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt -eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ -c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ -MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH -HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf -jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 -5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB -rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU -F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c -wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 -cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB -AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp -WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 -xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ -2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ -IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 -aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X -em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR -dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ -OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ -hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy -tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. -# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. 
-# Label: "GeoTrust Primary Certification Authority" -# Serial: 32798226551256963324313806436981982369 -# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf -# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 -# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c ------BEGIN CERTIFICATE----- -MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY -MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo -R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx -MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK -Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 -AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA -ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 -7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W -kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI -mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ -KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 -6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl -4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K -oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj -UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU -AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only -# Label: "thawte Primary Root CA" -# Serial: 69529181992039203566298953787712940909 -# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 -# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 -# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f ------BEGIN CERTIFICATE----- -MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB -qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV -BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw -NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j -LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG -A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl -IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs -W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta -3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk -6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 -Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J -NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA -MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP -r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU -DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz -YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX -xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 -/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ -LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 -jVaMaA== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. 
- For authorized use only -# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" -# Serial: 33037644167568058970164719475676101450 -# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c -# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 -# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df ------BEGIN CERTIFICATE----- -MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB -yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp -U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW -ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 -nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex -t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz -SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG -BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ -rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ -NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E -BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH -BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy -aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv -MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE -p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y -5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK -WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ -4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N -hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq ------END CERTIFICATE----- - -# Issuer: CN=SecureTrust CA O=SecureTrust Corporation -# Subject: CN=SecureTrust CA O=SecureTrust Corporation -# Label: "SecureTrust CA" -# Serial: 17199774589125277788362757014266862032 -# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 -# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 -# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 ------BEGIN CERTIFICATE----- -MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz -MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv -cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz -Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO -0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao -wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj -7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS -8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT -BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg 
-JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC -NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 -6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ -3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm -D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS -CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR -3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= ------END CERTIFICATE----- - -# Issuer: CN=Secure Global CA O=SecureTrust Corporation -# Subject: CN=Secure Global CA O=SecureTrust Corporation -# Label: "Secure Global CA" -# Serial: 9751836167731051554232119481456978597 -# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de -# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b -# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 ------BEGIN CERTIFICATE----- -MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx -MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg -Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ -iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa -/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ -jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI -HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 -sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w -gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw -KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG -AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L -URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO -H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm -I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY -iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc -f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW ------END CERTIFICATE----- - -# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO Certification Authority O=COMODO CA Limited -# Label: "COMODO Certification Authority" -# Serial: 104350513648249232941998508985834464573 -# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 -# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b -# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 ------BEGIN CERTIFICATE----- -MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB -gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV -BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw -MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl -YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P -RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 -UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI -2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 
-Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp -+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ -DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O -nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW -/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g -PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u -QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY -SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv -IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ -RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 -zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd -BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB -ZQ== ------END CERTIFICATE----- - -# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Label: "Network Solutions Certificate Authority" -# Serial: 116697915152937497490437556386812487904 -# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e -# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce -# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c ------BEGIN CERTIFICATE----- -MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi -MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu -MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp -dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV -UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO -ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz -c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP -OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl -mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF -BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 -qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw -gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB -BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu -bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp -dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 -6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ -h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH -/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv -wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN -pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey ------END CERTIFICATE----- - -# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Label: "COMODO ECC Certification Authority" -# Serial: 41578283867086692638256921589707938090 -# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 -# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 -# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 ------BEGIN CERTIFICATE----- -MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT 
-IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw -MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy -ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N -T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv -biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR -FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J -cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW -BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm -fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv -GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GA CA" -# Serial: 86718877871133159090080555911823548314 -# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93 -# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9 -# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5 ------BEGIN CERTIFICATE----- -MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB -ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly -aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl -ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w -NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G -A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD -VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX -SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR -VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2 -w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF -mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg -4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9 -4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw -EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx -SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2 -ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8 -vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa -hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi -Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ -/L7fCg0= ------END CERTIFICATE----- - -# Issuer: CN=Certigna O=Dhimyotis -# Subject: CN=Certigna O=Dhimyotis -# Label: "Certigna" -# Serial: 18364802974209362175 -# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff -# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 -# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d ------BEGIN CERTIFICATE----- -MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV -BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X -DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ -BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 
-QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny -gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw -zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q -130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 -JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw -ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT -AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj -AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG -9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h -bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc -fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu -HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w -t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw -WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== ------END CERTIFICATE----- - -# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc -# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc -# Label: "Cybertrust Global Root" -# Serial: 4835703278459682877484360 -# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 -# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 -# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 ------BEGIN CERTIFICATE----- -MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG -A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh -bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE -ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS -b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 -7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS -J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y -HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP -t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz -FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY -XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ -MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw -hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js -MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA -A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj -Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx -XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o -omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc -A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW -WL1WMRJOEcgh4LMRkWXbtKaIOM5V ------END CERTIFICATE----- - -# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority -# Subject: O=Chunghwa Telecom Co., Ltd. 
OU=ePKI Root Certification Authority -# Label: "ePKI Root Certification Authority" -# Serial: 28956088682735189655030529057352760477 -# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 -# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 -# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 ------BEGIN CERTIFICATE----- -MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe -MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 -ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw -IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL -SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH -SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh -ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X -DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 -TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ -fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA -sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU -WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS -nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH -dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip -NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC -AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF -MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH -ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB -uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl -PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP -JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ -gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 -j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 -5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB -o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS -/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z -Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE -W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D -hNQ+IIX3Sj0rnP0qCglN6oH4EZw= ------END CERTIFICATE----- - -# Issuer: O=certSIGN OU=certSIGN ROOT CA -# Subject: O=certSIGN OU=certSIGN ROOT CA -# Label: "certSIGN ROOT CA" -# Serial: 35210227249154 -# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 -# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b -# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb ------BEGIN CERTIFICATE----- -MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT -AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD -QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP -MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC -ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do -0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ -UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d -RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ -OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv 
-JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C -AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O -BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ -LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY -MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ -44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I -Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw -i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN -9u6wWk5JRFRYX0KD ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only -# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only -# Label: "GeoTrust Primary Certification Authority - G3" -# Serial: 28809105769928564313984085209975885599 -# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 -# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd -# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 ------BEGIN CERTIFICATE----- -MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB -mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT -MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s -eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv -cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ -BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg -MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 -BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz -+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm -hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn -5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W -JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL -DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC -huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw -HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB -AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB -zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN -kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD -AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH -SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G -spki4cErx5z481+oghLrGREt ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. 
- For authorized use only -# Label: "thawte Primary Root CA - G2" -# Serial: 71758320672825410020661621085256472406 -# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f -# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 -# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 ------BEGIN CERTIFICATE----- -MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp -IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi -BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw -MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh -d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig -YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v -dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ -BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 -papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K -DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 -KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox -XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only -# Label: "thawte Primary Root CA - G3" -# Serial: 127614157056681299805556476275995414779 -# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 -# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 -# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB -rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV -BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa -Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl -LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u -MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl -ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm -gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 -YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf -b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 -9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S -zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk -OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV -HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA -2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW -oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu -t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c -KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM -m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu -MdRAGmI0Nj81Aa6sY6A= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary 
Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only -# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only -# Label: "GeoTrust Primary Certification Authority - G2" -# Serial: 80682863203381065782177908751794619243 -# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a -# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 -# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 ------BEGIN CERTIFICATE----- -MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL -MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj -KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 -MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 -eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV -BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw -NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV -BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH -MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL -So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal -tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG -CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT -qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz -rD6ogRLQy7rQkgu2npaqBA+K ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. 
- For authorized use only -# Label: "VeriSign Universal Root Certification Authority" -# Serial: 85209574734084581917763752644031726877 -# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 -# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 -# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c ------BEGIN CERTIFICATE----- -MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB -vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp -U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W -ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX -MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 -IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y -IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh -bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF -AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF -9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH -H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H -LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN -/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT -rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud -EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw -WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs -exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud -DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 -sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ -seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz -4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ -BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR -lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 -7M2CYfE45k+XmCpajQ== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. 
- For authorized use only -# Label: "VeriSign Class 3 Public Primary Certification Authority - G4" -# Serial: 63143484348153506665311985501458640051 -# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 -# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a -# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 ------BEGIN CERTIFICATE----- -MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG -A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp -U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg -SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln -biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm -GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve -fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ -aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj -aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW -kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC -4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga -FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== ------END CERTIFICATE----- - -# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. 
OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" -# Serial: 80544274841616 -# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 -# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 -# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 ------BEGIN CERTIFICATE----- -MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG -EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 -MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl -cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR -dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB -pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM -b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm -aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz -IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT -lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz -AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 -VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG -ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 -BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG -AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M -U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh -bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C -+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC -bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F -uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 -XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= ------END CERTIFICATE----- - -# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Label: "Staat der Nederlanden Root CA - G2" -# Serial: 10000012 -# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a -# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16 -# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f ------BEGIN CERTIFICATE----- -MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX -DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl -ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv -b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291 -qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp -uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU -Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE -pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp -5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M -UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN -GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy -5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv -6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK -eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6 -B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/ 
-BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov -L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG -SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS -CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen -5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897 -IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK -gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL -+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL -vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm -bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk -N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC -Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z -ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== ------END CERTIFICATE----- - -# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Label: "Hongkong Post Root CA 1" -# Serial: 1000 -# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca -# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 -# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 ------BEGIN CERTIFICATE----- -MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx -FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg -Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG -A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr -b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ -jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn -PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh -ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 -nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h -q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED -MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC -mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 -7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB -oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs -EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO -fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi -AmvZWg== ------END CERTIFICATE----- - -# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. -# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
-# Label: "SecureSign RootCA11" -# Serial: 1 -# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 -# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 -# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 ------BEGIN CERTIFICATE----- -MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr -MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG -A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 -MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp -Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD -QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz -i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 -h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV -MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 -UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni -8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC -h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD -VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB -AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm -KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ -X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr -QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 -pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN -QSdJQO7e5iNEOdyhIta6A/I= ------END CERTIFICATE----- - -# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Label: "Microsec e-Szigno Root CA 2009" -# Serial: 14014712776195784473 -# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 -# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e -# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 ------BEGIN CERTIFICATE----- -MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD -VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 -ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G -CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y -OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx -FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp -Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o -dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP -kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc -cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U -fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 -N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC -xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 -+rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM -Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG -SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h -mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk -ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 -tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c -2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t -HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW ------END CERTIFICATE----- - -# Issuer: 
CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Label: "GlobalSign Root CA - R3" -# Serial: 4835703278459759426209954 -# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 -# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad -# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b ------BEGIN CERTIFICATE----- -MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 -MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 -RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT -gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm -KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd -QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ -XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o -LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU -RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp -jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK -6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX -mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs -Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH -WD9f ------END CERTIFICATE----- - -# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" -# Serial: 6047274297262753887 -# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 -# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa -# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef ------BEGIN CERTIFICATE----- -MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE -BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h -cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy -MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg -Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 -thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM -cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG -L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i -NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h -X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b -m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy -Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja -EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T -KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF -6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh -OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD -VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD -VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp 
-cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv -ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl -AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF -661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 -am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 -ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 -PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS -3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k -SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF -3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM -ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g -StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz -Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB -jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V ------END CERTIFICATE----- - -# Issuer: CN=Izenpe.com O=IZENPE S.A. -# Subject: CN=Izenpe.com O=IZENPE S.A. -# Label: "Izenpe.com" -# Serial: 917563065490389241595536686991402621 -# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 -# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 -# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f ------BEGIN CERTIFICATE----- -MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 -MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 -ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD -VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j -b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq -scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO -xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H -LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX -uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD -yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ -JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q -rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN -BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L -hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB -QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ -HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu -Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg -QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB -BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx -MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA -A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb -laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 -awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo -JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw -LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT -VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk -LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb -UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ -QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ -naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls -QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== ------END CERTIFICATE----- - -# Issuer: CN=Chambers of Commerce Root - 2008 O=AC 
Camerfirma S.A. -# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. -# Label: "Chambers of Commerce Root - 2008" -# Serial: 11806822484801597146 -# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7 -# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c -# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0 ------BEGIN CERTIFICATE----- -MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD -VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 -IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 -MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz -IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz -MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj -dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw -EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp -MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G -CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9 -28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq -VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q -DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR -5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL -ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a -Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl -UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s -+12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5 -Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj -ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx -hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV -HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1 -+HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN -YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t -L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy -ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt -IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV -HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w -DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW -PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF -5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1 -glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH -FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2 -pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD -xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG -tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq -jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De -fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg -OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ -d0jQ ------END CERTIFICATE----- - -# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. -# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. 
-# Label: "Global Chambersign Root - 2008" -# Serial: 14541511773111788494 -# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3 -# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c -# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca ------BEGIN CERTIFICATE----- -MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD -VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 -IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 -MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD -aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx -MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy -cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG -A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl -BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI -hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed -KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7 -G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2 -zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4 -ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG -HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2 -Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V -yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e -beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r -6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh -wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog -zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW -BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr -ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp -ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk -cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt -YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC -CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow -KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI -hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ -UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz -X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x -fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz -a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd -Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd -SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O -AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso -M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge -v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z -09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B ------END CERTIFICATE----- - -# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. -# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
-# Label: "Go Daddy Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 -# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b -# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT -EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp -ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz -NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH -EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE -AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw -DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD -E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH -/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy -DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh -GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR -tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA -AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX -WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu -9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr -gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo -2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO -LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI -4uJEvlz36hz1 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 -# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e -# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs -ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw -MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 -b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj -aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp -Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg -nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 -HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N -Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN -dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 -HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G -CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU -sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 -4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg -8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K -pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 -mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Services Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 -# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f -# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 ------BEGIN CERTIFICATE----- -MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs -ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 -MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD -VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy -ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy -dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p -OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 -8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K -Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe -hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk -6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q -AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI -bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB -ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z -qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd -iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn -0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN -sSi6 ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Commercial O=AffirmTrust -# Subject: CN=AffirmTrust Commercial O=AffirmTrust -# Label: "AffirmTrust Commercial" -# Serial: 8608355977964138876 -# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 -# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 -# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP -Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr -ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL -MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 -yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr -VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ -nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG -XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj -vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt -Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g -N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC -nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Networking O=AffirmTrust -# Subject: CN=AffirmTrust Networking 
O=AffirmTrust -# Label: "AffirmTrust Networking" -# Serial: 8957382827206547757 -# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f -# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f -# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y -YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua -kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL -QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp -6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG -yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i -QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO -tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu -QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ -Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u -olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 -x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium O=AffirmTrust -# Subject: CN=AffirmTrust Premium O=AffirmTrust -# Label: "AffirmTrust Premium" -# Serial: 7893706540734352110 -# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 -# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 -# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz -dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG -A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U -cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf -qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ -JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ -+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS -s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 -HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 -70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG -V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S -qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S -5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia -C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX -OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE -FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 -KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg -Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B -8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ -MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc -0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
-u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF -u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH -YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 -GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO -RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e -KeC2uAloGRwYQw== ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust -# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust -# Label: "AffirmTrust Premium ECC" -# Serial: 8401224907861490260 -# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d -# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb -# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 ------BEGIN CERTIFICATE----- -MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC -VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ -cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ -BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt -VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D -0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 -ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G -A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs -aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I -flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Label: "Certum Trusted Network CA" -# Serial: 279744 -# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 -# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e -# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e ------BEGIN CERTIFICATE----- -MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM -MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D -ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU -cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 -WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg -Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw -IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH -UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM -TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU -BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM -kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x -AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV -HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y -sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL -I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 -J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY -VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI -03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= ------END CERTIFICATE----- - -# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA -# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA -# Label: "TWCA Root Certification Authority" -# Serial: 1 -# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 -# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 -# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 ------BEGIN CERTIFICATE----- -MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES -MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU -V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz -WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO -LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB -AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE -AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH -K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX -RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z -rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx -3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq -hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC -MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls -XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D -lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn -aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ -YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 -# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 -# Label: "Security Communication RootCA2" -# Serial: 0 -# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 -# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 -# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl -MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe -U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX -DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy -dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj -YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV -OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr -zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM -VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ -hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO -ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw -awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs -OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 -DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF -coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc -okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 -t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy -1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ -SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions RootCA 2011" -# Serial: 0 -# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 -# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d -# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 ------BEGIN CERTIFICATE----- -MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix -RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 -dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p -YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw -NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK -EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl -cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz -dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ -fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns -bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD -75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP -FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV -HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp -5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu -b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA -A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p -6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 -TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 -dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys -Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI -l7WdmplNsDz4SgCbZN2fOUvRJ9e4 ------END CERTIFICATE----- - -# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Label: "Actalis Authentication Root CA" -# Serial: 6271844772424770508 -# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 -# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac -# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 ------BEGIN CERTIFICATE----- -MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE -BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w -MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 -IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC -SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 -ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv -UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX -4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 -KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ -gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb -rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ -51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F -be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe -KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F -v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn -fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 
-jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz -ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt -ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL -e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 -jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz -WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V -SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j -pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX -X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok -fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R -K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU -ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU -LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT -LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== ------END CERTIFICATE----- - -# Issuer: O=Trustis Limited OU=Trustis FPS Root CA -# Subject: O=Trustis Limited OU=Trustis FPS Root CA -# Label: "Trustis FPS Root CA" -# Serial: 36053640375399034304724988975563710553 -# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d -# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04 -# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d ------BEGIN CERTIFICATE----- -MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF -MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL -ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx -MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc -MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+ -AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH -iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj -vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA -0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB -OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/ -BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E -FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01 -GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW -zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4 -1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE -f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F -jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN -ZetX2fNXlrtIzYE= ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 2 Root CA" -# Serial: 2 -# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 -# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 -# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr 
-6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV -L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 -1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx -MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ -QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB -arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr -Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi -FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS -P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN -9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz -uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h -9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s -A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t -OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo -+fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 -KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 -DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us -H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ -I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 -5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h -3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz -Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 3 Root CA" -# Serial: 2 -# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec -# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 -# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y -ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E -N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 -tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX -0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c -/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X -KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY -zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS -O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D -34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP -K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv -Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj -QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV -cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS -IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 -HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa -O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv 
-033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u -dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE -kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 -3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD -u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq -4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 3" -# Serial: 1 -# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef -# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 -# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN -8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ -RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 -hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 -ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM -EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 -A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy -WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ -1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 -6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT -91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml -e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p -TpPDpFQUWw== ------END CERTIFICATE----- - -# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus -# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus -# Label: "EE Certification Centre Root CA" -# Serial: 112324828676200291871926431888494945866 -# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f -# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7 -# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76 ------BEGIN CERTIFICATE----- -MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1 -MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1 -czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG -CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy -MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl -ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS -b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB -AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy -euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO -bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw 
-WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d -MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE -1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD -VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/ -zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB -BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF -BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV -v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG -E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u -uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW -iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v -GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 2009" -# Serial: 623603 -# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f -# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 -# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 ------BEGIN CERTIFICATE----- -MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha -ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM -HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 -UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 -tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R -ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM -lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp -/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G -A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G -A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj -dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy -MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl -cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js -L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL -BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni -acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 -o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K -zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 -PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y -Johw1+qRzT65ysCQblrGXnRl11z+o+I= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 EV 2009" -# Serial: 623604 -# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 -# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 -# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 ------BEGIN CERTIFICATE----- -MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw -NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV 
-BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn -ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 -3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z -qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR -p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 -HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw -ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea -HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw -Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh -c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E -RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt -dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku -Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp -3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 -nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF -CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na -xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX -KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 ------END CERTIFICATE----- - -# Issuer: CN=CA Disig Root R2 O=Disig a.s. -# Subject: CN=CA Disig Root R2 O=Disig a.s. -# Label: "CA Disig Root R2" -# Serial: 10572350602393338211 -# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 -# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 -# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 ------BEGIN CERTIFICATE----- -MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV -BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu -MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy -MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx -EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw -ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe -NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH -PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I -x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe -QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR -yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO -QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 -H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ -QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD -i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs -nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 -rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud -DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI -hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM -tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf -GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb -lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka -+elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal -TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i -nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 -gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr -G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os 
-zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x -L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL ------END CERTIFICATE----- - -# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Label: "ACCVRAIZ1" -# Serial: 6828503384748696800 -# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 -# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 -# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 ------BEGIN CERTIFICATE----- -MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE -AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw -CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ -BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND -VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb -qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY -HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo -G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA -lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr -IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ -0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH -k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 -4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO -m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa -cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl -uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI -KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls -ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG -AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 -VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT -VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG -CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA -cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA -QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA -7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA -cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA -QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA -czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu -aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt -aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud -DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF -BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp -D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU -JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m -AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD -vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms -tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH -7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h -I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA -h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF -d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H -pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 ------END CERTIFICATE----- - -# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Label: "TWCA Global Root CA" -# Serial: 3262 -# MD5 
Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 -# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 -# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx -EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT -VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 -NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT -B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF -10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz -0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh -MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH -zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc -46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 -yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi -laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP -oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA -BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE -qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm -4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL -1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn -LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF -H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo -RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ -nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh -15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW -6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW -nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j -wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz -aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy -KwbQBM0= ------END CERTIFICATE----- - -# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Label: "TeliaSonera Root CA v1" -# Serial: 199041966741090107964904287217786801558 -# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c -# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 -# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 ------BEGIN CERTIFICATE----- -MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw -NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv -b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD -VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F -VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 -7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X -Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ -/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs -81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm -dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe -Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu -sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 
-pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs -slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ -arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD -VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG -9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl -dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx -0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj -TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed -Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 -Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI -OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 -vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW -t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn -HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx -SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= ------END CERTIFICATE----- - -# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi -# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi -# Label: "E-Tugra Certification Authority" -# Serial: 7667447206703254355 -# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 -# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 -# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c ------BEGIN CERTIFICATE----- -MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV -BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC -aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV -BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 -Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz -MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ -BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp -em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN -ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY -B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH -D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF -Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo -q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D -k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH -fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut -dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM -ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 -zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn -rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX -U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 -Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 -XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF -Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR -HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY -GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c -77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 -+GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK 
-vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 -FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl -yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P -AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD -y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d -NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 2" -# Serial: 1 -# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a -# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 -# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd -AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC -FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi -1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq -jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ -wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ -WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy -NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC -uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw -IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 -g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN -9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP -BSeOE6Fuwg== ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot 2011 O=Atos -# Subject: CN=Atos TrustedRoot 2011 O=Atos -# Label: "Atos TrustedRoot 2011" -# Serial: 6643877497813316402 -# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 -# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 -# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE -AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG -EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM -FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC -REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp -Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM -VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ -SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ -4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L -cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi -eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV 
-HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG -A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 -DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j -vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP -DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc -maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D -lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv -KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 1 G3" -# Serial: 687049649626669250736271037606554624078720034195 -# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab -# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 -# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 -MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV -wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe -rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 -68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh -4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp -UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o -abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc -3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G -KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt -hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO -Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt -zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD -ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC -MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 -cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN -qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 -YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv -b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 -8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k -NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj -ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp -q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt -nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2 G3" -# Serial: 390156079458959257446133169266079962026824725800 -# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 -# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 -# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL 
-BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 -MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf -qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW -n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym -c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ -O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 -o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j -IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq -IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz -8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh -vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l -7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG -cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD -ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 -AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC -roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga -W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n -lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE -+V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV -csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd -dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg -KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM -HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 -WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3 G3" -# Serial: 268090761170461462463995952157327242137089239581 -# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 -# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d -# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 -MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR -/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu -FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR -U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c -ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR -FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k -A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw -eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl -sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp -VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q -A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ -ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD 
-ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px -KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI -FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv -oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg -u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP -0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf -3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl -8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ -DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN -PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ -ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G2" -# Serial: 15385348160840213938643033620894905419 -# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d -# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f -# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 ------BEGIN CERTIFICATE----- -MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA -n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc -biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp -EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA -bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu -YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB -AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW -BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI -QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I -0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni -lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 -B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv -ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo -IhNzbM8m9Yop5w== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G3" -# Serial: 15459312981008553731928384953135426796 -# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb -# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 -# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 ------BEGIN CERTIFICATE----- -MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg -RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq 
-hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf -Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q -RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD -AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY -JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv -6pZjamVFkpUBtA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G2" -# Serial: 4293743540046975378534879503202253541 -# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 -# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 -# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f ------BEGIN CERTIFICATE----- -MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH -MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI -2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx -1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ -q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz -tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ -vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV -5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY -1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 -NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG -Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 -8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe -pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl -MrY= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G3" -# Serial: 7089244469030293291760083333884364146 -# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca -# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e -# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 ------BEGIN CERTIFICATE----- -MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe -Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw -EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x -IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF -K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG -fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO -Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd -BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx -AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ 
-oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 -sycX ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Trusted Root G4" -# Serial: 7451500558977370777930084869016614236 -# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 -# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 -# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 ------BEGIN CERTIFICATE----- -MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg -RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y -ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If -xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV -ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO -DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ -jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ -CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi -EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM -fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY -uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK -chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t -9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD -ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 -SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd -+SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc -fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa -sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N -cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N -0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie -4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI -r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 -/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm -gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ ------END CERTIFICATE----- - -# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Label: "COMODO RSA Certification Authority" -# Serial: 101909084537582093308941363524873193117 -# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 -# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 -# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 ------BEGIN CERTIFICATE----- -MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB -hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV -BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT 
-EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR -Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR -6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X -pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC -9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV -/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf -Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z -+pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w -qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah -SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC -u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf -Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq -crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E -FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB -/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl -wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM -4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV -2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna -FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ -CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK -boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke -jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL -S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb -QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl -0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB -NVOFBkpdn627G190 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Label: "USERTrust RSA Certification Authority" -# Serial: 2645093764781058787591871645665788717 -# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 -# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e -# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 ------BEGIN CERTIFICATE----- -MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB -iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl -cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV -BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw -MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV -BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU -aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B -3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY -tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ -Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 -VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT -79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 -c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT -Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l -c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee -UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE 
-Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd -BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G -A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF -Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO -VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 -ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs -8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR -iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze -Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ -XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ -qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB -VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB -L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG -jjxDah2nGN59PRbxYvnKkKj9 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Label: "USERTrust ECC Certification Authority" -# Serial: 123013823720199481456569720443997572134 -# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 -# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 -# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a ------BEGIN CERTIFICATE----- -MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL -MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl -eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT -JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT -Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg -VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo -I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng -o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G -A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB -zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW -RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Label: "GlobalSign ECC Root CA - R4" -# Serial: 14367148294922964480859022125800977897474 -# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e -# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb -# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c ------BEGIN CERTIFICATE----- -MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk -MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH -bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX -DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD -QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu -MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ -FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F -uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX 
-kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs -ewv4n4Q= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Label: "GlobalSign ECC Root CA - R5" -# Serial: 32785792099990507226680698011560947931244 -# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 -# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa -# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 ------BEGIN CERTIFICATE----- -MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk -MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH -bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX -DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD -QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu -MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc -8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke -hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI -KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg -515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO -xwy8p2Fp8fc74SrL+SvzZpA3 ------END CERTIFICATE----- - -# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden -# Label: "Staat der Nederlanden Root CA - G3" -# Serial: 10003001 -# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37 -# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc -# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28 ------BEGIN CERTIFICATE----- -MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX -DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl -ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv -b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP -cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW -IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX -xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy -KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR -9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az -5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8 -6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7 -Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP -bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt -BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt -XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF -MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd -INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD -U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp -LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8 -Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp -gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh -/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw 
-0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A -fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq -4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR -1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/ -QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM -94B7IWcnMFk= ------END CERTIFICATE----- - -# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden -# Label: "Staat der Nederlanden EV Root CA" -# Serial: 10000013 -# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba -# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb -# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a ------BEGIN CERTIFICATE----- -MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y -MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg -TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS -b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS -M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC -UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d -Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p -rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l -pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb -j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC -KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS -/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X -cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH -1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP -px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB -/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 -MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI -eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u -2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS -v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC -wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy -CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e -vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 -Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa -Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL -eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 -FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc -7uzXLg== ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Label: "IdenTrust Commercial Root CA 1" -# Serial: 13298821034946342390520003877796839426 -# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 -# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 -# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu -VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw 
-MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw -JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT -3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU -+ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp -S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 -bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi -T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL -vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK -Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK -dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT -c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv -l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N -iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD -ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH -6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt -LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 -nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 -+wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK -W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT -AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq -l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG -4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ -mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A -7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Label: "IdenTrust Public Sector Root CA 1" -# Serial: 13298821034946342390521976156843933698 -# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba -# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd -# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f ------BEGIN CERTIFICATE----- -MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu -VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN -MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 -MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 -ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy -RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS -bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF -/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R -3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw -EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy -9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V -GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ -2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV -WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD -W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN -AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj 
-t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV -DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 -TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G -lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW -mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df -WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 -+bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ -tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA -GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv -8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only -# Label: "Entrust Root Certification Authority - G2" -# Serial: 1246989352 -# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 -# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 -# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 ------BEGIN CERTIFICATE----- -MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 -cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs -IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz -dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy -NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu -dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt -dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 -aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK -AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T -RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN -cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW -wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 -U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 -jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP -BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN -BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ -jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ -Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v -1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R -nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH -VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only -# Label: "Entrust Root Certification Authority - EC1" -# Serial: 51543124481930649114116133369 -# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc -# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 -# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 ------BEGIN CERTIFICATE----- -MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG -A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 -d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu -dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq -RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy -MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD -VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 -L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g -Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD -ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi -A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt -ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH -Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O -BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC -R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX -hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G ------END CERTIFICATE----- - -# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority -# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority -# Label: "CFCA EV ROOT" -# Serial: 407555286 -# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 -# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 -# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd ------BEGIN CERTIFICATE----- -MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD -TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y -aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx -MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j -aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP -T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 -sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL -TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 -/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp -7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz -EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt -hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP -a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot -aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg -TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV -PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv -cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL -tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd -BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB -ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT -ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL -jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS -ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
-P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 -xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d -Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN -5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe -/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z -AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ -5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GB CA" -# Serial: 157768595616588414422159278966750757568 -# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d -# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed -# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 ------BEGIN CERTIFICATE----- -MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt -MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg -Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i -YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x -CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG -b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh -bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 -HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx -WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX -1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk -u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P -99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r -M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB -BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh -cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 -gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO -ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf -aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic -Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= ------END CERTIFICATE----- - -# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. -# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
-# Label: "SZAFIR ROOT CA2" -# Serial: 357043034767186914217277344587386743377558296292 -# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 -# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de -# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe ------BEGIN CERTIFICATE----- -MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL -BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 -ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw -NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L -cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg -Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN -QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT -3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw -3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 -3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 -BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN -XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF -AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw -8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG -nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP -oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy -d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg -LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority -# Label: "Certum Trusted Network CA 2" -# Serial: 44979900017204383099463764357512596969 -# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 -# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 -# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 ------BEGIN CERTIFICATE----- -MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB -gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu -QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG -A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz -OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ -VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 -b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA -DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn -0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB -OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE -fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E -Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m -o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i -sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW -OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez -Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS -adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n -3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC -AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ -F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf -CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 -XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm -djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ -WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb -AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq -P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko -b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj -XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P -5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi -DrW5viSP ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce -# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 -# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 ------BEGIN CERTIFICATE----- -MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix -DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k -IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT -N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v -dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG -A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh -ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx -QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 -dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA -4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 -AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 -4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C -ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV -9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD -gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 -Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq -NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko -LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc -Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd -ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I -XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI -M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot -9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V -Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea -j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh -X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ -l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf -bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 -pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK -e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 -vm9qp/UsQu0yrbYhnr68 ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef -# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 -# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 ------BEGIN CERTIFICATE----- -MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN -BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl -bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv -b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ -BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj -YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 -MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 -dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg -QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa -jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC -MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi -C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep -lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof -TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR ------END CERTIFICATE----- - -# Issuer: CN=ISRG Root X1 O=Internet Security Research Group -# Subject: CN=ISRG Root X1 O=Internet Security Research Group -# Label: "ISRG Root X1" -# Serial: 172886928669790476064670243504169061120 -# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e -# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 -# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 ------BEGIN CERTIFICATE----- -MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw -TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh -cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 -WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu -ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc -h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ -0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U -A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW -T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH -B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC -B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv -KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn -OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn -jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw -qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI -rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq -hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL -ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ -3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK -NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 -ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur -TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC -jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
-oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq -4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA -mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d -emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= ------END CERTIFICATE----- - -# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Label: "AC RAIZ FNMT-RCM" -# Serial: 485876308206448804701554682760554759 -# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d -# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 -# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx -CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ -WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ -BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG -Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ -yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf -BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz -WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF -tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z -374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC -IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL -mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 -wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS -MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 -ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet -UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H -YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 -LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD -nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 -RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM -LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf -77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N -JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm -fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp -6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp -1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B -9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok -RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv -uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 1 O=Amazon -# Subject: CN=Amazon Root CA 1 O=Amazon -# Label: "Amazon Root CA 1" -# Serial: 143266978916655856878034712317230054538369994 -# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 -# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 -# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e ------BEGIN CERTIFICATE----- -MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF -ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 -b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL -MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv -b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
-ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM -9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw -IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 -VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L -93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm -jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA -A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI -U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs -N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv -o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU -5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy -rqXRfboQnoZsG4q5WTP468SQvvG5 ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 2 O=Amazon -# Subject: CN=Amazon Root CA 2 O=Amazon -# Label: "Amazon Root CA 2" -# Serial: 143266982885963551818349160658925006970653239 -# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 -# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a -# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF -ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 -b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL -MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv -b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK -gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ -W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg -1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K -8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r -2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me -z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR -8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj -mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz -7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 -+XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI -0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB -Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm -UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 -LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY -+gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS -k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl -7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm -btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl -urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ -fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 -n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE -76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H -9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT -4PsJYGw= ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 3 O=Amazon -# Subject: CN=Amazon Root CA 3 O=Amazon -# Label: "Amazon Root CA 3" -# Serial: 143266986699090766294700635381230934788665930 -# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 -# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e -# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 ------BEGIN CERTIFICATE----- -MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 -MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g -Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG -A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg -Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl -ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr -ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr -BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM -YyRIHN8wfdVoOw== ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 4 O=Amazon -# Subject: CN=Amazon Root CA 4 O=Amazon -# Label: "Amazon Root CA 4" -# Serial: 143266989758080763974105200630763877849284878 -# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd -# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be -# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 ------BEGIN CERTIFICATE----- -MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 -MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g -Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG -A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg -Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi -9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk -M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB -/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB -MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw -CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW -1KyLa2tJElMzrdfkviT8tQp21KW8EA== ------END CERTIFICATE----- - -# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A. -# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A. 
-# Label: "LuxTrust Global Root 2" -# Serial: 59914338225734147123941058376788110305822489521 -# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c -# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f -# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5 ------BEGIN CERTIFICATE----- -MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL -BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV -BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw -MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B -LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F -ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem -hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1 -EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn -Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4 -zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ -96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m -j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g -DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+ -8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j -X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH -hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB -KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0 -Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT -+Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL -BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9 -BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO -jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9 -loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c -qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+ -2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/ -JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre -zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf -LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+ -x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6 -oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr ------END CERTIFICATE----- - -# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM -# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM -# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" -# Serial: 1 -# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 -# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca -# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 ------BEGIN CERTIFICATE----- -MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx -GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp -bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w -KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 -BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy -dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG 
-EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll -IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU -QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT -TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg -LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 -a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr -LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr -N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X -YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ -iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f -AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH -V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh -AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf -IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 -lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c -8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf -lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= ------END CERTIFICATE----- - -# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Label: "GDCA TrustAUTH R5 ROOT" -# Serial: 9009899650740120186 -# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 -# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 -# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 ------BEGIN CERTIFICATE----- -MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE -BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ -IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 -MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV -BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w -HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj -Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj -TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u -KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj -qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm -MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 -ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP -zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk -L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC -jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA -HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC -AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB -/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg -p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm -DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 -COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry -L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf -JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg -IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io -2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV -09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ 
-XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq -T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe -MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== ------END CERTIFICATE----- - -# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Label: "TrustCor RootCert CA-1" -# Serial: 15752444095811006489 -# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 -# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a -# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD -VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk -MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U -cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y -IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB -pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h -IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG -A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU -cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB -CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid -RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V -seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme -9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV -EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW -hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ -DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD -ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I -/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf -ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ -yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts -L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN -zl/HHk484IkzlQsPpTLWPFp5LBk= ------END CERTIFICATE----- - -# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority -# Label: "TrustCor RootCert CA-2" -# Serial: 2711694510199101698 -# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 -# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 -# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 ------BEGIN CERTIFICATE----- -MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV -BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw -IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy -dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig -Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk -MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg -Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD -VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy -dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ -QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq -1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp -2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK -DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape -az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF -3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 -oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM -g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 -mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh -8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd -BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U -nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw -DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX -dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ -MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL -/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX -CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa -ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW -2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 -N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 -Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB -As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp -5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu -1uwJ ------END CERTIFICATE----- - -# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority -# Label: "TrustCor ECA-1" -# Serial: 9548242946988625984 -# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c -# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd -# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c ------BEGIN CERTIFICATE----- -MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD -VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk -MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U -cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y -IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV -BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw -IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy -dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig -RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb -3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA -BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 -3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou -owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ -wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF -ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf -BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ -MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv -civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 -AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F -hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 -soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI -WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi -tJ/X5g== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Label: "SSL.com Root Certification Authority RSA" -# Serial: 8875640296558310041 -# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 -# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb -# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 ------BEGIN CERTIFICATE----- -MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE -BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK -DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz -OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv -bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R -xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX -qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC -C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 -6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh -/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF -YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E -JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc -US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 -ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm 
-+Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi -M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV -HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G -A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV -cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc -Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs -PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ -q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 -cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr -a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I -H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y -K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu -nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf -oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY -Ic2wBlX7Jz9TkHCpBB5XJ7k= ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com Root Certification Authority ECC" -# Serial: 8495723813297216424 -# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e -# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a -# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 ------BEGIN CERTIFICATE----- -MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC -VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 -aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz -WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 -b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS -b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB -BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI -7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg -CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud -EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD -VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T -kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ -gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority RSA R2" -# Serial: 6248227494352943350 -# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 -# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a -# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c ------BEGIN CERTIFICATE----- -MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV -BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE -CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy -MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G -A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD -DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq 
-M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf -OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa -4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 -HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR -aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA -b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ -Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV -PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO -pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu -UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY -MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV -HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 -9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW -s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 -Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg -cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM -79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz -/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt -ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm -Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK -QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ -w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi -S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 -mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority ECC" -# Serial: 3182246526754555285 -# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 -# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d -# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 ------BEGIN CERTIFICATE----- -MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC -VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx -NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv -bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 -AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA -VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku -WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP -MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX -5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ -ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg -h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Label: "GlobalSign Root CA - R6" -# Serial: 1417766617973444989252670301619537 -# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae -# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 -# SHA256 Fingerprint: 
2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg -MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh -bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx -MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET -MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI -xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k -ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD -aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw -LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw -1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX -k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 -SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h -bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n -WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY -rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce -MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu -bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN -nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt -Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 -55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj -vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf -cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz -oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp -nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs -pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v -JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R -8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 -5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GC CA" -# Serial: 44084345621038548146064804565436152554 -# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 -# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 -# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d ------BEGIN CERTIFICATE----- -MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw -CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 -bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg -Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ -BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu -ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS -b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni -eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W -p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T -rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV -57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg -Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 ------END CERTIFICATE----- - -# 
Issuer: CN=GTS Root R1 O=Google Trust Services LLC -# Subject: CN=GTS Root R1 O=Google Trust Services LLC -# Label: "GTS Root R1" -# Serial: 146587175971765017618439757810265552097 -# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85 -# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8 -# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH -MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM -QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy -MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl -cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM -f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX -mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7 -zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P -fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc -vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4 -Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp -zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO -Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW -k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+ -DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF -lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW -Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1 -d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z -XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR -gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3 -d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv -J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg -DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM -+SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy -F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9 -SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws -E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R2 O=Google Trust Services LLC -# Subject: CN=GTS Root R2 O=Google Trust Services LLC -# Label: "GTS Root R2" -# Serial: 146587176055767053814479386953112547951 -# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b -# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d -# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH -MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM -QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy -MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl -cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv -CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg -GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu -XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd -re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu 
-PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1 -mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K -8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj -x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR -nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0 -kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok -twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp -8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT -vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT -z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA -pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb -pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB -R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R -RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk -0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC -5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF -izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn -yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R3 O=Google Trust Services LLC -# Subject: CN=GTS Root R3 O=Google Trust Services LLC -# Label: "GTS Root R3" -# Serial: 146587176140553309517047991083707763997 -# MD5 Fingerprint: 1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25 -# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5 -# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5 ------BEGIN CERTIFICATE----- -MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout -736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A -DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud -DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk -fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA -njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R4 O=Google Trust Services LLC -# Subject: CN=GTS Root R4 O=Google Trust Services LLC -# Label: "GTS Root R4" -# Serial: 146587176229350439916519468929765261721 -# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26 -# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb -# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd ------BEGIN CERTIFICATE----- -MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu -hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l -xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud 
-DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0 -CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx -sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w== ------END CERTIFICATE----- - -# Issuer: CN=UCA Global G2 Root O=UniTrust -# Subject: CN=UCA Global G2 Root O=UniTrust -# Label: "UCA Global G2 Root" -# Serial: 124779693093741543919145257850076631279 -# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 -# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a -# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 -MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH -bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x -CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds -b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr -b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 -kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm -VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R -VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc -C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj -tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY -D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv -j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl -NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 -iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP -O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ -BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV -ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj -L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 -1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl -1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU -b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV -PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj -y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb -EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg -DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI -+Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy -YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX -UB+K+wb1whnw0A== ------END CERTIFICATE----- - -# Issuer: CN=UCA Extended Validation Root O=UniTrust -# Subject: CN=UCA Extended Validation Root O=UniTrust -# Label: "UCA Extended Validation Root" -# Serial: 106100277556486529736699587978573607008 -# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 -# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a -# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH -MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF -eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx -MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV -BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog -D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS 
-sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop -O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk -sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi -c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj -VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz -KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ -TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G -sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs -1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD -fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T -AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN -l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR -ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ -VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 -c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp -4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s -t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj -2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO -vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C -xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx -cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM -fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax ------END CERTIFICATE----- - -# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 -# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 -# Label: "Certigna Root CA" -# Serial: 269714418870597844693661054334862075617 -# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 -# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 -# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 ------BEGIN CERTIFICATE----- -MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw -WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw -MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x -MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD -VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX -BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw -ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO -ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M -CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu -I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm -TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh -C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf -ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz -IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT -Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k -JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 -hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB -GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of -1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov -L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo -dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr -aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq -hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L 
-6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG -HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 -0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB -lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi -o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 -gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v -faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 -Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh -jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw -3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= ------END CERTIFICATE----- - -# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI -# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI -# Label: "emSign Root CA - G1" -# Serial: 235931866688319308814040 -# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac -# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c -# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 ------BEGIN CERTIFICATE----- -MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD -VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU -ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH -MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO -MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv -Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN -BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz -f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO -8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq -d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM -tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt -Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB -o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD -AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x -PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM -wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d -GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH -6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby -RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx -iN66zB+Afko= ------END CERTIFICATE----- - -# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI -# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI -# Label: "emSign ECC Root CA - G3" -# Serial: 287880440101571086945156 -# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 -# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 -# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b ------BEGIN CERTIFICATE----- -MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG -EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo -bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g -RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ -TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s -b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw -djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 
-WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS -fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB -zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq -hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB -CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD -+JbNR6iC8hZVdyR+EhCVBCyj ------END CERTIFICATE----- - -# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI -# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI -# Label: "emSign Root CA - C1" -# Serial: 825510296613316004955058 -# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 -# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 -# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f ------BEGIN CERTIFICATE----- -MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG -A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg -SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw -MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln -biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v -dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ -BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ -HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH -3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH -GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c -xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 -aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq -TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 -/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 -kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG -YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT -+xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo -WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= ------END CERTIFICATE----- - -# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI -# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI -# Label: "emSign ECC Root CA - C3" -# Serial: 582948710642506000014504 -# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 -# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 -# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 ------BEGIN CERTIFICATE----- -MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG -EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx -IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw -MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln -biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND -IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci -MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti -sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O -BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB -Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c -3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J -0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== ------END CERTIFICATE----- - -# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post -# Label: 
"Hongkong Post Root CA 3" -# Serial: 46170865288971385588281144162979347873371282084 -# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 -# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 -# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 ------BEGIN CERTIFICATE----- -MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL -BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ -SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n -a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 -NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT -CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u -Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO -dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI -VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV -9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY -2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY -vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt -bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb -x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ -l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK -TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj -Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e -i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw -DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG -7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk -MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr -gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk -GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS -3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm -Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ -l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c -JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP -L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa -LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG -mpv0 ------END CERTIFICATE----- diff --git a/.tox/py37-normal/lib/python3.7/site-packages/certifi/core.py b/.tox/py37-normal/lib/python3.7/site-packages/certifi/core.py deleted file mode 100644 index 7271acf..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/certifi/core.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- - -""" -certifi.py -~~~~~~~~~~ - -This module returns the installation location of cacert.pem. -""" -import os - - -def where(): - f = os.path.dirname(__file__) - - return os.path.join(f, 'cacert.pem') diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst b/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst deleted file mode 100644 index c0f044d..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/DESCRIPTION.rst +++ /dev/null @@ -1,70 +0,0 @@ -Chardet: The Universal Character Encoding Detector --------------------------------------------------- - -.. 
image:: https://img.shields.io/travis/chardet/chardet/stable.svg - :alt: Build status - :target: https://travis-ci.org/chardet/chardet - -.. image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg - :target: https://coveralls.io/r/chardet/chardet - -.. image:: https://img.shields.io/pypi/v/chardet.svg - :target: https://warehouse.python.org/project/chardet/ - :alt: Latest version on PyPI - -.. image:: https://img.shields.io/pypi/l/chardet.svg - :alt: License - - -Detects - - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants) - - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese) - - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese) - - EUC-KR, ISO-2022-KR (Korean) - - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic) - - ISO-8859-5, windows-1251 (Bulgarian) - - ISO-8859-1, windows-1252 (Western European languages) - - ISO-8859-7, windows-1253 (Greek) - - ISO-8859-8, windows-1255 (Visual and Logical Hebrew) - - TIS-620 (Thai) - -.. note:: - Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily - disabled until we can retrain the models. - -Requires Python 2.6, 2.7, or 3.3+. - -Installation ------------- - -Install from `PyPI `_:: - - pip install chardet - -Documentation -------------- - -For users, docs are now available at https://chardet.readthedocs.io/. - -Command-line Tool ------------------ - -chardet comes with a command-line script which reports on the encodings of one -or more files:: - - % chardetect somefile someotherfile - somefile: windows-1252 with confidence 0.5 - someotherfile: ascii with confidence 1.0 - -About ------ - -This is a continuation of Mark Pilgrim's excellent chardet. Previously, two -versions needed to be maintained: one that supported python 2.x and one that -supported python 3.x. We've recently merged with `Ian Cordasco `_'s -`charade `_ fork, so now we have one -coherent version that works for Python 2.6+. 
- -:maintainer: Dan Blanchard - - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/INSTALLER b/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/METADATA b/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/METADATA deleted file mode 100644 index 1427867..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/METADATA +++ /dev/null @@ -1,96 +0,0 @@ -Metadata-Version: 2.0 -Name: chardet -Version: 3.0.4 -Summary: Universal encoding detector for Python 2 and 3 -Home-page: https://github.com/chardet/chardet -Author: Daniel Blanchard -Author-email: dan.blanchard@gmail.com -License: LGPL -Keywords: encoding,i18n,xml -Platform: UNKNOWN -Classifier: Development Status :: 4 - Beta -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Text Processing :: Linguistic - -Chardet: The Universal Character Encoding Detector --------------------------------------------------- - -.. image:: https://img.shields.io/travis/chardet/chardet/stable.svg - :alt: Build status - :target: https://travis-ci.org/chardet/chardet - -.. image:: https://img.shields.io/coveralls/chardet/chardet/stable.svg - :target: https://coveralls.io/r/chardet/chardet - -.. image:: https://img.shields.io/pypi/v/chardet.svg - :target: https://warehouse.python.org/project/chardet/ - :alt: Latest version on PyPI - -.. image:: https://img.shields.io/pypi/l/chardet.svg - :alt: License - - -Detects - - ASCII, UTF-8, UTF-16 (2 variants), UTF-32 (4 variants) - - Big5, GB2312, EUC-TW, HZ-GB-2312, ISO-2022-CN (Traditional and Simplified Chinese) - - EUC-JP, SHIFT_JIS, CP932, ISO-2022-JP (Japanese) - - EUC-KR, ISO-2022-KR (Korean) - - KOI8-R, MacCyrillic, IBM855, IBM866, ISO-8859-5, windows-1251 (Cyrillic) - - ISO-8859-5, windows-1251 (Bulgarian) - - ISO-8859-1, windows-1252 (Western European languages) - - ISO-8859-7, windows-1253 (Greek) - - ISO-8859-8, windows-1255 (Visual and Logical Hebrew) - - TIS-620 (Thai) - -.. note:: - Our ISO-8859-2 and windows-1250 (Hungarian) probers have been temporarily - disabled until we can retrain the models. - -Requires Python 2.6, 2.7, or 3.3+. - -Installation ------------- - -Install from `PyPI `_:: - - pip install chardet - -Documentation -------------- - -For users, docs are now available at https://chardet.readthedocs.io/. 
- -Command-line Tool ------------------ - -chardet comes with a command-line script which reports on the encodings of one -or more files:: - - % chardetect somefile someotherfile - somefile: windows-1252 with confidence 0.5 - someotherfile: ascii with confidence 1.0 - -About ------ - -This is a continuation of Mark Pilgrim's excellent chardet. Previously, two -versions needed to be maintained: one that supported python 2.x and one that -supported python 3.x. We've recently merged with `Ian Cordasco `_'s -`charade `_ fork, so now we have one -coherent version that works for Python 2.6+. - -:maintainer: Dan Blanchard - - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/RECORD b/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/RECORD deleted file mode 100644 index 655e02f..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/RECORD +++ /dev/null @@ -1,91 +0,0 @@ -../../../bin/chardetect,sha256=TILYUysO3Eg6edpzkFBXVl9jdnELa7yzXuMapKaxwFw,258 -chardet-3.0.4.dist-info/DESCRIPTION.rst,sha256=PQ4sBsMyKFZkjC6QpmbpLn0UtCNyeb-ZqvCGEgyZMGk,2174 -chardet-3.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -chardet-3.0.4.dist-info/METADATA,sha256=RV_2I4B1Z586DL8oVO5Kp7X5bUdQ5EuKAvNoAEF8wSw,3239 -chardet-3.0.4.dist-info/RECORD,, -chardet-3.0.4.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 -chardet-3.0.4.dist-info/entry_points.txt,sha256=fAMmhu5eJ-zAJ-smfqQwRClQ3-nozOCmvJ6-E8lgGJo,60 -chardet-3.0.4.dist-info/metadata.json,sha256=0htbRM18ujyGZDdfowgAqj6Hq2eQtwzwyhaEveKntgo,1375 -chardet-3.0.4.dist-info/top_level.txt,sha256=AowzBbZy4x8EirABDdJSLJZMkJ_53iIag8xfKR6D7kI,8 -chardet/__init__.py,sha256=YsP5wQlsHJ2auF1RZJfypiSrCA7_bQiRm3ES_NI76-Y,1559 -chardet/__pycache__/__init__.cpython-37.pyc,, -chardet/__pycache__/big5freq.cpython-37.pyc,, -chardet/__pycache__/big5prober.cpython-37.pyc,, -chardet/__pycache__/chardistribution.cpython-37.pyc,, -chardet/__pycache__/charsetgroupprober.cpython-37.pyc,, -chardet/__pycache__/charsetprober.cpython-37.pyc,, -chardet/__pycache__/codingstatemachine.cpython-37.pyc,, -chardet/__pycache__/compat.cpython-37.pyc,, -chardet/__pycache__/cp949prober.cpython-37.pyc,, -chardet/__pycache__/enums.cpython-37.pyc,, -chardet/__pycache__/escprober.cpython-37.pyc,, -chardet/__pycache__/escsm.cpython-37.pyc,, -chardet/__pycache__/eucjpprober.cpython-37.pyc,, -chardet/__pycache__/euckrfreq.cpython-37.pyc,, -chardet/__pycache__/euckrprober.cpython-37.pyc,, -chardet/__pycache__/euctwfreq.cpython-37.pyc,, -chardet/__pycache__/euctwprober.cpython-37.pyc,, -chardet/__pycache__/gb2312freq.cpython-37.pyc,, -chardet/__pycache__/gb2312prober.cpython-37.pyc,, -chardet/__pycache__/hebrewprober.cpython-37.pyc,, -chardet/__pycache__/jisfreq.cpython-37.pyc,, -chardet/__pycache__/jpcntx.cpython-37.pyc,, -chardet/__pycache__/langbulgarianmodel.cpython-37.pyc,, -chardet/__pycache__/langcyrillicmodel.cpython-37.pyc,, -chardet/__pycache__/langgreekmodel.cpython-37.pyc,, -chardet/__pycache__/langhebrewmodel.cpython-37.pyc,, -chardet/__pycache__/langhungarianmodel.cpython-37.pyc,, -chardet/__pycache__/langthaimodel.cpython-37.pyc,, -chardet/__pycache__/langturkishmodel.cpython-37.pyc,, -chardet/__pycache__/latin1prober.cpython-37.pyc,, -chardet/__pycache__/mbcharsetprober.cpython-37.pyc,, -chardet/__pycache__/mbcsgroupprober.cpython-37.pyc,, -chardet/__pycache__/mbcssm.cpython-37.pyc,, -chardet/__pycache__/sbcharsetprober.cpython-37.pyc,, 
-chardet/__pycache__/sbcsgroupprober.cpython-37.pyc,, -chardet/__pycache__/sjisprober.cpython-37.pyc,, -chardet/__pycache__/universaldetector.cpython-37.pyc,, -chardet/__pycache__/utf8prober.cpython-37.pyc,, -chardet/__pycache__/version.cpython-37.pyc,, -chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254 -chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757 -chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411 -chardet/charsetgroupprober.py,sha256=6bDu8YIiRuScX4ca9Igb0U69TA2PGXXDej6Cc4_9kO4,3787 -chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110 -chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 -chardet/cli/__pycache__/__init__.cpython-37.pyc,, -chardet/cli/__pycache__/chardetect.cpython-37.pyc,, -chardet/cli/chardetect.py,sha256=YBO8L4mXo0WR6_-Fjh_8QxPBoEBNqB9oNxNrdc54AQs,2738 -chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590 -chardet/compat.py,sha256=PKTzHkSbtbHDqS9PyujMbX74q1a8mMpeQTDVsQhZMRw,1134 -chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855 -chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661 -chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950 -chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510 -chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749 -chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546 -chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748 -chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621 -chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747 -chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715 -chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754 -chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838 -chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777 -chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643 -chardet/langbulgarianmodel.py,sha256=1HqQS9Pbtnj1xQgxitJMvw8X6kKr5OockNCZWfEQrPE,12839 -chardet/langcyrillicmodel.py,sha256=LODajvsetH87yYDDQKA2CULXUH87tI223dhfjh9Zx9c,17948 -chardet/langgreekmodel.py,sha256=8YAW7bU8YwSJap0kIJSbPMw1BEqzGjWzqcqf0WgUKAA,12688 -chardet/langhebrewmodel.py,sha256=JSnqmE5E62tDLTPTvLpQsg5gOMO4PbdWRvV7Avkc0HA,11345 -chardet/langhungarianmodel.py,sha256=RhapYSG5l0ZaO-VV4Fan5sW0WRGQqhwBM61yx3yxyOA,12592 -chardet/langthaimodel.py,sha256=8l0173Gu_W6G8mxmQOTEF4ls2YdE7FxWf3QkSxEGXJQ,11290 -chardet/langturkishmodel.py,sha256=W22eRNJsqI6uWAfwXSKVWWnCerYqrI8dZQTm_M0lRFk,11102 -chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370 -chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413 -chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012 -chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481 -chardet/sbcharsetprober.py,sha256=LDSpCldDCFlYwUkGkwD2oFxLlPWIWXT09akH_2PiY74,5657 -chardet/sbcsgroupprober.py,sha256=1IprcCB_k1qfmnxGC6MBbxELlKqD3scW6S8YIwdeyXA,3546 -chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774 -chardet/universaldetector.py,sha256=qL0174lSZE442eB21nnktT9_VcAye07laFWUeUrjttY,12485 -chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766 
-chardet/version.py,sha256=sp3B08mrDXB-pf3K9fqJ_zeDHOCLC8RrngQyDFap_7g,242 diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/WHEEL b/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/WHEEL deleted file mode 100644 index 8b6dd1b..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.29.0) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/entry_points.txt b/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/entry_points.txt deleted file mode 100644 index a884269..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/entry_points.txt +++ /dev/null @@ -1,3 +0,0 @@ -[console_scripts] -chardetect = chardet.cli.chardetect:main - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/metadata.json b/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/metadata.json deleted file mode 100644 index 8cdf025..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Text Processing :: Linguistic"], "extensions": {"python.commands": {"wrap_console": {"chardetect": "chardet.cli.chardetect:main"}}, "python.details": {"contacts": [{"email": "dan.blanchard@gmail.com", "name": "Daniel Blanchard", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/chardet/chardet"}}, "python.exports": {"console_scripts": {"chardetect": "chardet.cli.chardetect:main"}}}, "generator": "bdist_wheel (0.29.0)", "keywords": ["encoding", "i18n", "xml"], "license": "LGPL", "metadata_version": "2.0", "name": "chardet", "summary": "Universal encoding detector for Python 2 and 3", "test_requires": [{"requires": ["hypothesis", "pytest"]}], "version": "3.0.4"} \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/top_level.txt b/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/top_level.txt deleted file mode 100644 index 79236f2..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet-3.0.4.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -chardet diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/__init__.py deleted file mode 100644 index 0f9f820..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as 
published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - - -from .compat import PY2, PY3 -from .universaldetector import UniversalDetector -from .version import __version__, VERSION - - -def detect(byte_str): - """ - Detect the encoding of the given byte string. - - :param byte_str: The byte sequence to examine. - :type byte_str: ``bytes`` or ``bytearray`` - """ - if not isinstance(byte_str, bytearray): - if not isinstance(byte_str, bytes): - raise TypeError('Expected object of type bytes or bytearray, got: ' - '{0}'.format(type(byte_str))) - else: - byte_str = bytearray(byte_str) - detector = UniversalDetector() - detector.feed(byte_str) - return detector.close() diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/big5freq.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/big5freq.py deleted file mode 100644 index 38f3251..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/big5freq.py +++ /dev/null @@ -1,386 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# Big5 frequency table -# by Taiwan's Mandarin Promotion Council -# -# -# 128 --> 0.42261 -# 256 --> 0.57851 -# 512 --> 0.74851 -# 1024 --> 0.89384 -# 2048 --> 0.97583 -# -# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 -# Random Distribution Ration = 512/(5401-512)=0.105 -# -# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR - -BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 - -#Char to FreqOrder table -BIG5_TABLE_SIZE = 5376 - -BIG5_CHAR_TO_FREQ_ORDER = ( - 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 -3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 -1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 - 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 -3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 -4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 -5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 - 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 - 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 - 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 -2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 -1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 -3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 - 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 -1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 -3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 -2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 - 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 -3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 -1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 -5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 - 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 -5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 -1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 - 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 - 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 -3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 -3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 - 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 -2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 -2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 - 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 - 287,1577,2116, 
768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 -3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 -1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 -1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 -1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 -2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 - 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 -4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 -1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 -5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 -2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 - 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 - 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 - 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 - 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 -5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 - 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 -1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 - 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 - 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 -5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 -1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 - 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 -3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 -4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 -3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 - 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 - 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 -1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 -4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 -3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 -3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 -2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 -5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 -3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 -5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 -1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 -2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 -1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 - 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 -1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 
882,4554,3995,2759,3470, # 1168 -4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 -3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 - 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 - 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 - 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 -2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 -5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 -1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 -2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 -1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 -1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 -5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 -5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 -5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 -3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 -4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 -4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 -2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 -5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 -3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 - 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 -5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 -5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 -1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 -2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 -3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 -4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 -5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 -3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 -4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 -1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 -1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 -4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 -1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 - 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 -1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 -1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 -3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 - 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 -5203,5204,1958,1767,2956,3365,3712,1174, 
452,1477,4594,3366,3155,5205,2838,1253, # 1808 -2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 -1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 -1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 -5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 - 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 -4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 - 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 -2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 - 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 -1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 -1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 - 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 -4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 -4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 -1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 -3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 -5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 -5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 -1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 -2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 -1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 -3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 -2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 -3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 -2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 -4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 -4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 -3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 - 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 -3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 - 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 -3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 -4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 -3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 -1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 -5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 - 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 -5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 -1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 - 391, 
498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 -4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 -4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 - 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 -2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 -2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 -3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 -1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 -4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 -2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 -1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 -1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 -2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 -3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 -1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 -5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 -1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 -4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 -1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 - 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 -1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 -4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 -4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 -2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 -1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 -4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 - 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 -5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 -2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 -3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 -4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 - 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 -5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 -5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 -1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 -4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 -4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 -2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 -3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 -3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 
-2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 -1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 -4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 -3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 -3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 -2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 -4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 -5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 -3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 -2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 -3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 -1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 -2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 -3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 -4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 -2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 -2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 -5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 -1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 -2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 -1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 -3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 -4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 -2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 -3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 -3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 -2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 -4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 -2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 -3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 -4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 -5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 -3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 - 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 -1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 -4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 -1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 -4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 -5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 - 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 
-5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 -5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 -2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 -3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776 -2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 -2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 - 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 -1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 -4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 -3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 -3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 - 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 -2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 - 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 -2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 -4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 -1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 -4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 -1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 -3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 - 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 -3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 -5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 -5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 -3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 -3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 -1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 -2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 -5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 -1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 -1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 -3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 - 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 -1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 -4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 -5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 -2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 -3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 - 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 -1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 
4352 -2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 -2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 -5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 -5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 -5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 -2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 -2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 -1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 -4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 -3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 -3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 -4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 -4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 -2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 -2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 -5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 -4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 -5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 -4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 - 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 - 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 -1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 -3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 -4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 -1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 -5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 -2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 -2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 -3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 -5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 -1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 -3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 -5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 -1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 -5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 -2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 -3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 -2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 -3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 -3932,1988, 618, 
427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 -3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 -4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 - 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 -2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 -4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 -3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 -5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 -1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 -5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 - 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 -1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 - 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 -4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 -1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 -4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 -1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 - 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 -3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 -4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 -5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 - 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 -3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 - 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 -2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 -) - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/big5prober.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/big5prober.py deleted file mode 100644 index 98f9970..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/big5prober.py +++ /dev/null @@ -1,47 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import Big5DistributionAnalysis -from .mbcssm import BIG5_SM_MODEL - - -class Big5Prober(MultiByteCharSetProber): - def __init__(self): - super(Big5Prober, self).__init__() - self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) - self.distribution_analyzer = Big5DistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "Big5" - - @property - def language(self): - return "Chinese" diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/chardistribution.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/chardistribution.py deleted file mode 100644 index c0395f4..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/chardistribution.py +++ /dev/null @@ -1,233 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE, - EUCTW_TYPICAL_DISTRIBUTION_RATIO) -from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE, - EUCKR_TYPICAL_DISTRIBUTION_RATIO) -from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE, - GB2312_TYPICAL_DISTRIBUTION_RATIO) -from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE, - BIG5_TYPICAL_DISTRIBUTION_RATIO) -from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE, - JIS_TYPICAL_DISTRIBUTION_RATIO) - - -class CharDistributionAnalysis(object): - ENOUGH_DATA_THRESHOLD = 1024 - SURE_YES = 0.99 - SURE_NO = 0.01 - MINIMUM_DATA_THRESHOLD = 3 - - def __init__(self): - # Mapping table to get frequency order from char order (get from - # GetOrder()) - self._char_to_freq_order = None - self._table_size = None # Size of above table - # This is a constant value which varies from language to language, - # used in calculating confidence. See - # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html - # for further detail. 
- self.typical_distribution_ratio = None - self._done = None - self._total_chars = None - self._freq_chars = None - self.reset() - - def reset(self): - """reset analyser, clear any state""" - # If this flag is set to True, detection is done and conclusion has - # been made - self._done = False - self._total_chars = 0 # Total characters encountered - # The number of characters whose frequency order is less than 512 - self._freq_chars = 0 - - def feed(self, char, char_len): - """feed a character with known length""" - if char_len == 2: - # we only care about 2-bytes character in our distribution analysis - order = self.get_order(char) - else: - order = -1 - if order >= 0: - self._total_chars += 1 - # order is valid - if order < self._table_size: - if 512 > self._char_to_freq_order[order]: - self._freq_chars += 1 - - def get_confidence(self): - """return confidence based on existing data""" - # if we didn't receive any character in our consideration range, - # return negative answer - if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD: - return self.SURE_NO - - if self._total_chars != self._freq_chars: - r = (self._freq_chars / ((self._total_chars - self._freq_chars) - * self.typical_distribution_ratio)) - if r < self.SURE_YES: - return r - - # normalize confidence (we don't want to be 100% sure) - return self.SURE_YES - - def got_enough_data(self): - # It is not necessary to receive all data to draw conclusion. - # For charset detection, certain amount of data is enough - return self._total_chars > self.ENOUGH_DATA_THRESHOLD - - def get_order(self, byte_str): - # We do not handle characters based on the original encoding string, - # but convert this encoding string to a number, here called order. - # This allows multiple encodings of a language to share one frequency - # table. - return -1 - - -class EUCTWDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(EUCTWDistributionAnalysis, self).__init__() - self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER - self._table_size = EUCTW_TABLE_SIZE - self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for euc-TW encoding, we are interested - # first byte range: 0xc4 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. State machine has done that - first_char = byte_str[0] - if first_char >= 0xC4: - return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1 - else: - return -1 - - -class EUCKRDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(EUCKRDistributionAnalysis, self).__init__() - self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER - self._table_size = EUCKR_TABLE_SIZE - self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for euc-KR encoding, we are interested - # first byte range: 0xb0 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. 
State machine has done that - first_char = byte_str[0] - if first_char >= 0xB0: - return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1 - else: - return -1 - - -class GB2312DistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(GB2312DistributionAnalysis, self).__init__() - self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER - self._table_size = GB2312_TABLE_SIZE - self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for GB2312 encoding, we are interested - # first byte range: 0xb0 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. State machine has done that - first_char, second_char = byte_str[0], byte_str[1] - if (first_char >= 0xB0) and (second_char >= 0xA1): - return 94 * (first_char - 0xB0) + second_char - 0xA1 - else: - return -1 - - -class Big5DistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(Big5DistributionAnalysis, self).__init__() - self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER - self._table_size = BIG5_TABLE_SIZE - self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for big5 encoding, we are interested - # first byte range: 0xa4 -- 0xfe - # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe - # no validation needed here. State machine has done that - first_char, second_char = byte_str[0], byte_str[1] - if first_char >= 0xA4: - if second_char >= 0xA1: - return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63 - else: - return 157 * (first_char - 0xA4) + second_char - 0x40 - else: - return -1 - - -class SJISDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(SJISDistributionAnalysis, self).__init__() - self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER - self._table_size = JIS_TABLE_SIZE - self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for sjis encoding, we are interested - # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe - # second byte range: 0x40 -- 0x7e, 0x81 -- oxfe - # no validation needed here. State machine has done that - first_char, second_char = byte_str[0], byte_str[1] - if (first_char >= 0x81) and (first_char <= 0x9F): - order = 188 * (first_char - 0x81) - elif (first_char >= 0xE0) and (first_char <= 0xEF): - order = 188 * (first_char - 0xE0 + 31) - else: - return -1 - order = order + second_char - 0x40 - if second_char > 0x7F: - order = -1 - return order - - -class EUCJPDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): - super(EUCJPDistributionAnalysis, self).__init__() - self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER - self._table_size = JIS_TABLE_SIZE - self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO - - def get_order(self, byte_str): - # for euc-JP encoding, we are interested - # first byte range: 0xa0 -- 0xfe - # second byte range: 0xa1 -- 0xfe - # no validation needed here. State machine has done that - char = byte_str[0] - if char >= 0xA0: - return 94 * (char - 0xA1) + byte_str[1] - 0xa1 - else: - return -1 diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/charsetgroupprober.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/charsetgroupprober.py deleted file mode 100644 index 8b3738e..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/charsetgroupprober.py +++ /dev/null @@ -1,106 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. 
-# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import ProbingState -from .charsetprober import CharSetProber - - -class CharSetGroupProber(CharSetProber): - def __init__(self, lang_filter=None): - super(CharSetGroupProber, self).__init__(lang_filter=lang_filter) - self._active_num = 0 - self.probers = [] - self._best_guess_prober = None - - def reset(self): - super(CharSetGroupProber, self).reset() - self._active_num = 0 - for prober in self.probers: - if prober: - prober.reset() - prober.active = True - self._active_num += 1 - self._best_guess_prober = None - - @property - def charset_name(self): - if not self._best_guess_prober: - self.get_confidence() - if not self._best_guess_prober: - return None - return self._best_guess_prober.charset_name - - @property - def language(self): - if not self._best_guess_prober: - self.get_confidence() - if not self._best_guess_prober: - return None - return self._best_guess_prober.language - - def feed(self, byte_str): - for prober in self.probers: - if not prober: - continue - if not prober.active: - continue - state = prober.feed(byte_str) - if not state: - continue - if state == ProbingState.FOUND_IT: - self._best_guess_prober = prober - return self.state - elif state == ProbingState.NOT_ME: - prober.active = False - self._active_num -= 1 - if self._active_num <= 0: - self._state = ProbingState.NOT_ME - return self.state - return self.state - - def get_confidence(self): - state = self.state - if state == ProbingState.FOUND_IT: - return 0.99 - elif state == ProbingState.NOT_ME: - return 0.01 - best_conf = 0.0 - self._best_guess_prober = None - for prober in self.probers: - if not prober: - continue - if not prober.active: - self.logger.debug('%s not active', prober.charset_name) - continue - conf = prober.get_confidence() - self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf) - if best_conf < conf: - best_conf = conf - self._best_guess_prober = prober - if not self._best_guess_prober: - return 0.0 - return best_conf diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/charsetprober.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/charsetprober.py deleted file mode 100644 index eac4e59..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/charsetprober.py +++ /dev/null @@ -1,145 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. 
-# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import logging -import re - -from .enums import ProbingState - - -class CharSetProber(object): - - SHORTCUT_THRESHOLD = 0.95 - - def __init__(self, lang_filter=None): - self._state = None - self.lang_filter = lang_filter - self.logger = logging.getLogger(__name__) - - def reset(self): - self._state = ProbingState.DETECTING - - @property - def charset_name(self): - return None - - def feed(self, buf): - pass - - @property - def state(self): - return self._state - - def get_confidence(self): - return 0.0 - - @staticmethod - def filter_high_byte_only(buf): - buf = re.sub(b'([\x00-\x7F])+', b' ', buf) - return buf - - @staticmethod - def filter_international_words(buf): - """ - We define three types of bytes: - alphabet: english alphabets [a-zA-Z] - international: international characters [\x80-\xFF] - marker: everything else [^a-zA-Z\x80-\xFF] - - The input buffer can be thought to contain a series of words delimited - by markers. This function works to filter all words that contain at - least one international character. All contiguous sequences of markers - are replaced by a single space ascii character. - - This filter applies to all scripts which do not use English characters. - """ - filtered = bytearray() - - # This regex expression filters out only words that have at-least one - # international character. The word may include one marker character at - # the end. - words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?', - buf) - - for word in words: - filtered.extend(word[:-1]) - - # If the last character in the word is a marker, replace it with a - # space as markers shouldn't affect our analysis (they are used - # similarly across all languages and may thus have similar - # frequencies). - last_char = word[-1:] - if not last_char.isalpha() and last_char < b'\x80': - last_char = b' ' - filtered.extend(last_char) - - return filtered - - @staticmethod - def filter_with_english_letters(buf): - """ - Returns a copy of ``buf`` that retains only the sequences of English - alphabet and high byte characters that are not between <> characters. - Also retains English alphabet and high byte characters immediately - before occurrences of >. - - This filter can be applied to all scripts which contain both English - characters and extended ASCII characters, but is currently only used by - ``Latin1Prober``. 
- """ - filtered = bytearray() - in_tag = False - prev = 0 - - for curr in range(len(buf)): - # Slice here to get bytes instead of an int with Python 3 - buf_char = buf[curr:curr + 1] - # Check if we're coming out of or entering an HTML tag - if buf_char == b'>': - in_tag = False - elif buf_char == b'<': - in_tag = True - - # If current character is not extended-ASCII and not alphabetic... - if buf_char < b'\x80' and not buf_char.isalpha(): - # ...and we're not in a tag - if curr > prev and not in_tag: - # Keep everything after last non-extended-ASCII, - # non-alphabetic character - filtered.extend(buf[prev:curr]) - # Output a space to delimit stretch we kept - filtered.extend(b' ') - prev = curr + 1 - - # If we're not in a tag... - if not in_tag: - # Keep everything after last non-extended-ASCII, non-alphabetic - # character - filtered.extend(buf[prev:]) - - return filtered diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/cli/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/cli/__init__.py deleted file mode 100644 index 8b13789..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/cli/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/cli/chardetect.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/cli/chardetect.py deleted file mode 100644 index f0a4cc5..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/cli/chardetect.py +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env python -""" -Script which takes one or more file paths and reports on their detected -encodings - -Example:: - - % chardetect somefile someotherfile - somefile: windows-1252 with confidence 0.5 - someotherfile: ascii with confidence 1.0 - -If no paths are provided, it takes its input from stdin. - -""" - -from __future__ import absolute_import, print_function, unicode_literals - -import argparse -import sys - -from chardet import __version__ -from chardet.compat import PY2 -from chardet.universaldetector import UniversalDetector - - -def description_of(lines, name='stdin'): - """ - Return a string describing the probable encoding of a file or - list of strings. - - :param lines: The lines to get the encoding of. - :type lines: Iterable of bytes - :param name: Name of file or collection of lines - :type name: str - """ - u = UniversalDetector() - for line in lines: - line = bytearray(line) - u.feed(line) - # shortcut out of the loop to save reading further - particularly useful if we read a BOM. - if u.done: - break - u.close() - result = u.result - if PY2: - name = name.decode(sys.getfilesystemencoding(), 'ignore') - if result['encoding']: - return '{0}: {1} with confidence {2}'.format(name, result['encoding'], - result['confidence']) - else: - return '{0}: no result'.format(name) - - -def main(argv=None): - """ - Handles command line arguments and gets things started. - - :param argv: List of arguments, as if specified on the command-line. - If None, ``sys.argv[1:]`` is used instead. - :type argv: list of str - """ - # Get command line arguments - parser = argparse.ArgumentParser( - description="Takes one or more file paths and reports their detected \ - encodings") - parser.add_argument('input', - help='File whose encoding we would like to determine. 
\ - (default: stdin)', - type=argparse.FileType('rb'), nargs='*', - default=[sys.stdin if PY2 else sys.stdin.buffer]) - parser.add_argument('--version', action='version', - version='%(prog)s {0}'.format(__version__)) - args = parser.parse_args(argv) - - for f in args.input: - if f.isatty(): - print("You are running chardetect interactively. Press " + - "CTRL-D twice at the start of a blank line to signal the " + - "end of your input. If you want help, run chardetect " + - "--help\n", file=sys.stderr) - print(description_of(f, f.name)) - - -if __name__ == '__main__': - main() diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/codingstatemachine.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/codingstatemachine.py deleted file mode 100644 index 68fba44..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/codingstatemachine.py +++ /dev/null @@ -1,88 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import logging - -from .enums import MachineState - - -class CodingStateMachine(object): - """ - A state machine to verify a byte sequence for a particular encoding. For - each byte the detector receives, it will feed that byte to every active - state machine available, one byte at a time. The state machine changes its - state based on its previous state and the byte it receives. There are 3 - states in a state machine that are of interest to an auto-detector: - - START state: This is the state to start with, or a legal byte sequence - (i.e. a valid code point) for character has been identified. - - ME state: This indicates that the state machine identified a byte sequence - that is specific to the charset it is designed for and that - there is no other possible encoding which can contain this byte - sequence. This will to lead to an immediate positive answer for - the detector. - - ERROR state: This indicates the state machine identified an illegal byte - sequence for that encoding. This will lead to an immediate - negative answer for this encoding. Detector will exclude this - encoding from consideration from here on. 
- """ - def __init__(self, sm): - self._model = sm - self._curr_byte_pos = 0 - self._curr_char_len = 0 - self._curr_state = None - self.logger = logging.getLogger(__name__) - self.reset() - - def reset(self): - self._curr_state = MachineState.START - - def next_state(self, c): - # for each byte we get its class - # if it is first byte, we also get byte length - byte_class = self._model['class_table'][c] - if self._curr_state == MachineState.START: - self._curr_byte_pos = 0 - self._curr_char_len = self._model['char_len_table'][byte_class] - # from byte's class and state_table, we get its next state - curr_state = (self._curr_state * self._model['class_factor'] - + byte_class) - self._curr_state = self._model['state_table'][curr_state] - self._curr_byte_pos += 1 - return self._curr_state - - def get_current_charlen(self): - return self._curr_char_len - - def get_coding_state_machine(self): - return self._model['name'] - - @property - def language(self): - return self._model['language'] diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/compat.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/compat.py deleted file mode 100644 index ddd7468..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/compat.py +++ /dev/null @@ -1,34 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# Contributor(s): -# Dan Blanchard -# Ian Cordasco -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -import sys - - -if sys.version_info < (3, 0): - PY2 = True - PY3 = False - base_str = (str, unicode) - text_type = unicode -else: - PY2 = False - PY3 = True - base_str = (bytes, str) - text_type = str diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/cp949prober.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/cp949prober.py deleted file mode 100644 index efd793a..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/cp949prober.py +++ /dev/null @@ -1,49 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .chardistribution import EUCKRDistributionAnalysis -from .codingstatemachine import CodingStateMachine -from .mbcharsetprober import MultiByteCharSetProber -from .mbcssm import CP949_SM_MODEL - - -class CP949Prober(MultiByteCharSetProber): - def __init__(self): - super(CP949Prober, self).__init__() - self.coding_sm = CodingStateMachine(CP949_SM_MODEL) - # NOTE: CP949 is a superset of EUC-KR, so the distribution should be - # not different. - self.distribution_analyzer = EUCKRDistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "CP949" - - @property - def language(self): - return "Korean" diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/enums.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/enums.py deleted file mode 100644 index 0451207..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/enums.py +++ /dev/null @@ -1,76 +0,0 @@ -""" -All of the Enums that are used throughout the chardet package. - -:author: Dan Blanchard (dan.blanchard@gmail.com) -""" - - -class InputState(object): - """ - This enum represents the different states a universal detector can be in. - """ - PURE_ASCII = 0 - ESC_ASCII = 1 - HIGH_BYTE = 2 - - -class LanguageFilter(object): - """ - This enum represents the different language filters we can apply to a - ``UniversalDetector``. - """ - CHINESE_SIMPLIFIED = 0x01 - CHINESE_TRADITIONAL = 0x02 - JAPANESE = 0x04 - KOREAN = 0x08 - NON_CJK = 0x10 - ALL = 0x1F - CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL - CJK = CHINESE | JAPANESE | KOREAN - - -class ProbingState(object): - """ - This enum represents the different states a prober can be in. - """ - DETECTING = 0 - FOUND_IT = 1 - NOT_ME = 2 - - -class MachineState(object): - """ - This enum represents the different states a state machine can be in. - """ - START = 0 - ERROR = 1 - ITS_ME = 2 - - -class SequenceLikelihood(object): - """ - This enum represents the likelihood of a character following the previous one. - """ - NEGATIVE = 0 - UNLIKELY = 1 - LIKELY = 2 - POSITIVE = 3 - - @classmethod - def get_num_categories(cls): - """:returns: The number of likelihood categories in the enum.""" - return 4 - - -class CharacterCategory(object): - """ - This enum represents the different categories language models for - ``SingleByteCharsetProber`` put characters into. - - Anything less than CONTROL is considered a letter. - """ - UNDEFINED = 255 - LINE_BREAK = 254 - SYMBOL = 253 - DIGIT = 252 - CONTROL = 251 diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/escprober.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/escprober.py deleted file mode 100644 index c70493f..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/escprober.py +++ /dev/null @@ -1,101 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. 
-# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .codingstatemachine import CodingStateMachine -from .enums import LanguageFilter, ProbingState, MachineState -from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL, - ISO2022KR_SM_MODEL) - - -class EscCharSetProber(CharSetProber): - """ - This CharSetProber uses a "code scheme" approach for detecting encodings, - whereby easily recognizable escape or shift sequences are relied on to - identify these encodings. - """ - - def __init__(self, lang_filter=None): - super(EscCharSetProber, self).__init__(lang_filter=lang_filter) - self.coding_sm = [] - if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED: - self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL)) - self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL)) - if self.lang_filter & LanguageFilter.JAPANESE: - self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL)) - if self.lang_filter & LanguageFilter.KOREAN: - self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL)) - self.active_sm_count = None - self._detected_charset = None - self._detected_language = None - self._state = None - self.reset() - - def reset(self): - super(EscCharSetProber, self).reset() - for coding_sm in self.coding_sm: - if not coding_sm: - continue - coding_sm.active = True - coding_sm.reset() - self.active_sm_count = len(self.coding_sm) - self._detected_charset = None - self._detected_language = None - - @property - def charset_name(self): - return self._detected_charset - - @property - def language(self): - return self._detected_language - - def get_confidence(self): - if self._detected_charset: - return 0.99 - else: - return 0.00 - - def feed(self, byte_str): - for c in byte_str: - for coding_sm in self.coding_sm: - if not coding_sm or not coding_sm.active: - continue - coding_state = coding_sm.next_state(c) - if coding_state == MachineState.ERROR: - coding_sm.active = False - self.active_sm_count -= 1 - if self.active_sm_count <= 0: - self._state = ProbingState.NOT_ME - return self.state - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - self._detected_charset = coding_sm.get_coding_state_machine() - self._detected_language = coding_sm.language - return self.state - - return self.state diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/escsm.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/escsm.py deleted file mode 100644 index 0069523..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/escsm.py +++ /dev/null @@ -1,246 +0,0 @@ 
-######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import MachineState - -HZ_CLS = ( -1,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,0,0, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,0,0,0,0, # 20 - 27 -0,0,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -0,0,0,0,0,0,0,0, # 40 - 47 -0,0,0,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,4,0,5,2,0, # 78 - 7f -1,1,1,1,1,1,1,1, # 80 - 87 -1,1,1,1,1,1,1,1, # 88 - 8f -1,1,1,1,1,1,1,1, # 90 - 97 -1,1,1,1,1,1,1,1, # 98 - 9f -1,1,1,1,1,1,1,1, # a0 - a7 -1,1,1,1,1,1,1,1, # a8 - af -1,1,1,1,1,1,1,1, # b0 - b7 -1,1,1,1,1,1,1,1, # b8 - bf -1,1,1,1,1,1,1,1, # c0 - c7 -1,1,1,1,1,1,1,1, # c8 - cf -1,1,1,1,1,1,1,1, # d0 - d7 -1,1,1,1,1,1,1,1, # d8 - df -1,1,1,1,1,1,1,1, # e0 - e7 -1,1,1,1,1,1,1,1, # e8 - ef -1,1,1,1,1,1,1,1, # f0 - f7 -1,1,1,1,1,1,1,1, # f8 - ff -) - -HZ_ST = ( -MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f -MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17 - 5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f - 4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27 - 4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f -) - -HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) - -HZ_SM_MODEL = {'class_table': HZ_CLS, - 'class_factor': 6, - 'state_table': HZ_ST, - 'char_len_table': HZ_CHAR_LEN_TABLE, - 'name': "HZ-GB-2312", - 'language': 'Chinese'} - -ISO2022CN_CLS = ( -2,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,0,0, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,0,0,0,0, # 20 - 27 -0,3,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -0,0,0,4,0,0,0,0, # 40 - 47 -0,0,0,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f 
-0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,0,0,0,0,0, # 78 - 7f -2,2,2,2,2,2,2,2, # 80 - 87 -2,2,2,2,2,2,2,2, # 88 - 8f -2,2,2,2,2,2,2,2, # 90 - 97 -2,2,2,2,2,2,2,2, # 98 - 9f -2,2,2,2,2,2,2,2, # a0 - a7 -2,2,2,2,2,2,2,2, # a8 - af -2,2,2,2,2,2,2,2, # b0 - b7 -2,2,2,2,2,2,2,2, # b8 - bf -2,2,2,2,2,2,2,2, # c0 - c7 -2,2,2,2,2,2,2,2, # c8 - cf -2,2,2,2,2,2,2,2, # d0 - d7 -2,2,2,2,2,2,2,2, # d8 - df -2,2,2,2,2,2,2,2, # e0 - e7 -2,2,2,2,2,2,2,2, # e8 - ef -2,2,2,2,2,2,2,2, # f0 - f7 -2,2,2,2,2,2,2,2, # f8 - ff -) - -ISO2022CN_ST = ( -MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 -MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f -MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 -MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27 - 5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f -) - -ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0) - -ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS, - 'class_factor': 9, - 'state_table': ISO2022CN_ST, - 'char_len_table': ISO2022CN_CHAR_LEN_TABLE, - 'name': "ISO-2022-CN", - 'language': 'Chinese'} - -ISO2022JP_CLS = ( -2,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,2,2, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,7,0,0,0, # 20 - 27 -3,0,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -6,0,4,0,8,0,0,0, # 40 - 47 -0,9,5,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,0,0,0,0,0, # 78 - 7f -2,2,2,2,2,2,2,2, # 80 - 87 -2,2,2,2,2,2,2,2, # 88 - 8f -2,2,2,2,2,2,2,2, # 90 - 97 -2,2,2,2,2,2,2,2, # 98 - 9f -2,2,2,2,2,2,2,2, # a0 - a7 -2,2,2,2,2,2,2,2, # a8 - af -2,2,2,2,2,2,2,2, # b0 - b7 -2,2,2,2,2,2,2,2, # b8 - bf -2,2,2,2,2,2,2,2, # c0 - c7 -2,2,2,2,2,2,2,2, # c8 - cf -2,2,2,2,2,2,2,2, # d0 - d7 -2,2,2,2,2,2,2,2, # d8 - df -2,2,2,2,2,2,2,2, # e0 - e7 -2,2,2,2,2,2,2,2, # e8 - ef -2,2,2,2,2,2,2,2, # f0 - f7 -2,2,2,2,2,2,2,2, # f8 - ff -) - -ISO2022JP_ST = ( -MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07 -MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17 
-MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f -MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47 -) - -ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) - -ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS, - 'class_factor': 10, - 'state_table': ISO2022JP_ST, - 'char_len_table': ISO2022JP_CHAR_LEN_TABLE, - 'name': "ISO-2022-JP", - 'language': 'Japanese'} - -ISO2022KR_CLS = ( -2,0,0,0,0,0,0,0, # 00 - 07 -0,0,0,0,0,0,0,0, # 08 - 0f -0,0,0,0,0,0,0,0, # 10 - 17 -0,0,0,1,0,0,0,0, # 18 - 1f -0,0,0,0,3,0,0,0, # 20 - 27 -0,4,0,0,0,0,0,0, # 28 - 2f -0,0,0,0,0,0,0,0, # 30 - 37 -0,0,0,0,0,0,0,0, # 38 - 3f -0,0,0,5,0,0,0,0, # 40 - 47 -0,0,0,0,0,0,0,0, # 48 - 4f -0,0,0,0,0,0,0,0, # 50 - 57 -0,0,0,0,0,0,0,0, # 58 - 5f -0,0,0,0,0,0,0,0, # 60 - 67 -0,0,0,0,0,0,0,0, # 68 - 6f -0,0,0,0,0,0,0,0, # 70 - 77 -0,0,0,0,0,0,0,0, # 78 - 7f -2,2,2,2,2,2,2,2, # 80 - 87 -2,2,2,2,2,2,2,2, # 88 - 8f -2,2,2,2,2,2,2,2, # 90 - 97 -2,2,2,2,2,2,2,2, # 98 - 9f -2,2,2,2,2,2,2,2, # a0 - a7 -2,2,2,2,2,2,2,2, # a8 - af -2,2,2,2,2,2,2,2, # b0 - b7 -2,2,2,2,2,2,2,2, # b8 - bf -2,2,2,2,2,2,2,2, # c0 - c7 -2,2,2,2,2,2,2,2, # c8 - cf -2,2,2,2,2,2,2,2, # d0 - d7 -2,2,2,2,2,2,2,2, # d8 - df -2,2,2,2,2,2,2,2, # e0 - e7 -2,2,2,2,2,2,2,2, # e8 - ef -2,2,2,2,2,2,2,2, # f0 - f7 -2,2,2,2,2,2,2,2, # f8 - ff -) - -ISO2022KR_ST = ( -MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f -MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17 -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f -MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27 -) - -ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) - -ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS, - 'class_factor': 6, - 'state_table': ISO2022KR_ST, - 'char_len_table': ISO2022KR_CHAR_LEN_TABLE, - 'name': "ISO-2022-KR", - 'language': 'Korean'} - - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/eucjpprober.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/eucjpprober.py deleted file mode 100644 index 20ce8f7..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/eucjpprober.py +++ /dev/null @@ -1,92 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original 
Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import ProbingState, MachineState -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import EUCJPDistributionAnalysis -from .jpcntx import EUCJPContextAnalysis -from .mbcssm import EUCJP_SM_MODEL - - -class EUCJPProber(MultiByteCharSetProber): - def __init__(self): - super(EUCJPProber, self).__init__() - self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL) - self.distribution_analyzer = EUCJPDistributionAnalysis() - self.context_analyzer = EUCJPContextAnalysis() - self.reset() - - def reset(self): - super(EUCJPProber, self).reset() - self.context_analyzer.reset() - - @property - def charset_name(self): - return "EUC-JP" - - @property - def language(self): - return "Japanese" - - def feed(self, byte_str): - for i in range(len(byte_str)): - # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte - coding_state = self.coding_sm.next_state(byte_str[i]) - if coding_state == MachineState.ERROR: - self.logger.debug('%s %s prober hit error at byte %s', - self.charset_name, self.language, i) - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - char_len = self.coding_sm.get_current_charlen() - if i == 0: - self._last_char[1] = byte_str[0] - self.context_analyzer.feed(self._last_char, char_len) - self.distribution_analyzer.feed(self._last_char, char_len) - else: - self.context_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - self.distribution_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - - self._last_char[0] = byte_str[-1] - - if self.state == ProbingState.DETECTING: - if (self.context_analyzer.got_enough_data() and - (self.get_confidence() > self.SHORTCUT_THRESHOLD)): - self._state = ProbingState.FOUND_IT - - return self.state - - def get_confidence(self): - context_conf = self.context_analyzer.get_confidence() - distrib_conf = self.distribution_analyzer.get_confidence() - return max(context_conf, distrib_conf) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/euckrfreq.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/euckrfreq.py deleted file mode 100644 index b68078c..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/euckrfreq.py +++ /dev/null @@ -1,195 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The 
Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# Sampling from about 20M text materials include literature and computer technology - -# 128 --> 0.79 -# 256 --> 0.92 -# 512 --> 0.986 -# 1024 --> 0.99944 -# 2048 --> 0.99999 -# -# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 -# Random Distribution Ration = 512 / (2350-512) = 0.279. -# -# Typical Distribution Ratio - -EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 - -EUCKR_TABLE_SIZE = 2352 - -# Char to FreqOrder table , -EUCKR_CHAR_TO_FREQ_ORDER = ( - 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, -1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, -1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, - 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, - 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, - 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, -1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, - 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, - 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, -1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, -1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, -1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, -1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, -1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, - 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, -1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, -1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, -1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, -1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, - 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, -1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, - 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, - 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, -1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, - 282, 96, 
574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, -1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, - 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, - 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893, -1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317, -1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, -1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, -1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, - 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, -1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, - 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, - 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, -1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, -1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, -1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, -1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, -1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, -1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, - 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, - 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, - 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, -1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, - 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, -1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, - 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, - 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, -2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, - 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, - 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, -2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, -2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, -2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, - 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, - 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, -2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, - 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, -1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, -2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, -1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, -2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, -2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, -1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, - 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, -2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 
405,1136,2120,2121, -2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, - 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, - 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, -2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, -1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298, -2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, -2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, -2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, -2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, -2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, -2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, -1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, -2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, -2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, -2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, -2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, -2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, -1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, -1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, -2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, -1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, -2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, -1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, - 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, -2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, - 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, -2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, - 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, -2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, -2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, - 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, -2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, -1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, - 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, -1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, -2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, -1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, -2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, - 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, -2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, -1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, -2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, -1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, -2409,2410, 255, 516, 479, 564, 550, 
214,1506,1507,1313, 413, 239, 444, 339,1145, -1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, - 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, -2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221, -2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, - 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, - 915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, -1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, -1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, - 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, -2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, -2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, - 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, - 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, - 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, -2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, - 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, - 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, -2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, -2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, - 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, -2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, -1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, - 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, -2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, -2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, -2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, - 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, - 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, - 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, -2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, -2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, -2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, -1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, -2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, - 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 -) - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/euckrprober.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/euckrprober.py deleted file mode 100644 index 345a060..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/euckrprober.py +++ /dev/null @@ -1,47 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. 
-# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import EUCKRDistributionAnalysis -from .mbcssm import EUCKR_SM_MODEL - - -class EUCKRProber(MultiByteCharSetProber): - def __init__(self): - super(EUCKRProber, self).__init__() - self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL) - self.distribution_analyzer = EUCKRDistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "EUC-KR" - - @property - def language(self): - return "Korean" diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/euctwfreq.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/euctwfreq.py deleted file mode 100644 index ed7a995..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/euctwfreq.py +++ /dev/null @@ -1,387 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# EUCTW frequency table -# Converted from big5 work -# by Taiwan's Mandarin Promotion Council -# - -# 128 --> 0.42261 -# 256 --> 0.57851 -# 512 --> 0.74851 -# 1024 --> 0.89384 -# 2048 --> 0.97583 -# -# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98 -# Random Distribution Ration = 512/(5401-512)=0.105 -# -# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR - -EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 - -# Char to FreqOrder table , -EUCTW_TABLE_SIZE = 5376 - -EUCTW_CHAR_TO_FREQ_ORDER = ( - 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742 -3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758 -1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774 - 63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790 -3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806 -4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822 -7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838 - 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854 - 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870 - 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886 -2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902 -1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918 -3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934 - 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950 -1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966 -3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982 -2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998 - 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014 -3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030 -1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046 -7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062 - 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078 -7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094 -1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110 - 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126 - 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142 -3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158 -3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174 - 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190 -2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206 -2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222 - 314,2615,2775,4308,2330,2331, 
569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 - 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 -3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 -1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 -1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 -1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 -2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 - 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 -4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 -1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 -7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 -2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 - 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 - 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 - 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 - 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 -7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 - 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 -1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 - 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 - 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 -7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 -1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 - 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 -3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 -4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 -3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 - 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 - 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 -1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 -4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 -3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 -3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 -2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766 -7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 -3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 -7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 -1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 -2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 -1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 - 78,3750,3751, 
267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878 -1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 -4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 -3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 - 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 - 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 - 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 -2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 -7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 -1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 -2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 -1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 -1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 -7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 -7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 -7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 -3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 -4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 -1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 -7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 -2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 -7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 -3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 -3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 -7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 -2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 -7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 - 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 -4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 -2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 -7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 -3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 -2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 -2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406 - 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 -2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 -1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 -1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 -2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 -1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 -7505,3129,3261, 
215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 -7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 -2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 -4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 -1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 -7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598 - 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 -4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 - 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 -2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 - 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 -1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 -1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 - 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 -3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 -3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 -1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 -3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 -7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 -7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 -1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 -2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 -1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 -3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 -2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 -3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 -2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 -4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 -4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 -3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 - 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 -3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 - 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 -3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 -3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 -3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 -1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 -7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 - 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 -7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 -1702,1226, 
102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 - 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 -4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 -3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 - 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 -2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 -2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 -3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 -1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 -4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 -2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318 -1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 -1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 -2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 -3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 -1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 -7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 -1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 -4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 -1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 - 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 -1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 -3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 -3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 -2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 -1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 -4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 - 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 -7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 -2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 -3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 -4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 - 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 -7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 -7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 -1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 -4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 -3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 -2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 -3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 
-3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 -2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 -1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 -4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 -3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 -3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 -2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 -4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 -7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 -3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 -2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 -3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 -1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 -2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 -3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 -4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 -2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 -2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 -7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 -1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 -2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 -1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 -3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 -4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 -2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 -3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 -3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 -2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 -4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 -2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 -3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 -4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 -7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 -3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 - 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 -1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 -4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 -1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 -4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406 -7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 - 
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 -7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 -2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 -1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 -1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 -3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 - 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 - 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 - 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 -3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 -2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 - 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 -7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 -1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 -3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 -7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 -1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 -7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 -4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 -1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 -2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 -2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 -4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 - 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 - 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 -3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 -3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 -1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 -2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 -7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 -1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 -1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 -3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950 - 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 -1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 -4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 -7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 -2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 -3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 - 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 
7062 -1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 -2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 -2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 -7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 -7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 -7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 -2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 -2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 -1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 -4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 -3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 -3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 -4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 -4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 -2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 -2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 -7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 -4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 -7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 -2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 -1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 -3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 -4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 -2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 - 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 -2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 -1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 -2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 -2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 -4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 -7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 -1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 -3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590 -7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 -1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 -8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 -2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 -8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 -2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 -2328,3852, 533,4273,3605,2181, 
617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 -8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 -8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 -8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 - 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 -8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 -4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 -3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 -8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 -1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 -8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 - 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 -1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 - 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 -4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 -1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 -4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 -1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 - 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 -3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 -4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 -8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 - 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 -3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 - 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 -2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 -) - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/euctwprober.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/euctwprober.py deleted file mode 100644 index 35669cc..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/euctwprober.py +++ /dev/null @@ -1,46 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import EUCTWDistributionAnalysis -from .mbcssm import EUCTW_SM_MODEL - -class EUCTWProber(MultiByteCharSetProber): - def __init__(self): - super(EUCTWProber, self).__init__() - self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL) - self.distribution_analyzer = EUCTWDistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "EUC-TW" - - @property - def language(self): - return "Taiwan" diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/gb2312freq.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/gb2312freq.py deleted file mode 100644 index 697837b..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/gb2312freq.py +++ /dev/null @@ -1,283 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# GB2312 most frequently used character table -# -# Char to FreqOrder table , from hz6763 - -# 512 --> 0.79 -- 0.79 -# 1024 --> 0.92 -- 0.13 -# 2048 --> 0.98 -- 0.06 -# 6768 --> 1.00 -- 0.02 -# -# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 -# Random Distribution Ration = 512 / (3755 - 512) = 0.157 -# -# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR - -GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 - -GB2312_TABLE_SIZE = 3760 - -GB2312_CHAR_TO_FREQ_ORDER = ( -1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, -2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, -2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, - 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, -1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, -1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, - 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, -1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, -2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, -3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, - 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, -1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, - 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, -2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, - 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, -2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, -1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, -3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, - 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, -1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, - 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, -2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, -1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, -3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, -1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, -2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, -1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, - 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, -3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, -3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, - 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, -3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, - 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, -1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 
110,4549,2066, 648, -3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, -2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, -1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, - 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, -1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, -4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, - 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, -3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, -3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, - 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, -1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, -2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, -1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, -1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, - 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, -3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, -3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, -4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, - 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, -3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, -1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, -1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, -4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, - 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, - 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, -3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, -1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, - 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, -1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, -2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, - 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, - 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, - 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, -3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, -4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, -3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, - 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, -2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, -2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, -2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, - 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, -2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, - 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, - 163,2167, 290,1209,1622,3378, 
550, 634,2508,2510, 695,2634,2384,2512,1476,1414, - 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, -3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, -2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, -2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, -1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, - 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, -2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, - 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, - 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, -1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, -1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, - 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, - 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, -1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, -2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, -3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, -2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, -2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, -2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, -3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, -1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, -1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, -2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, -1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, -3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, -1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, -1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, -3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, - 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, -2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, -1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, -4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, -1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, -1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, -3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, -1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, - 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, - 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, -1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, - 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, -1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, -1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, - 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 
892,2481,1623,4077, 982, -3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, -4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, -3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, -2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, -2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, -1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, -3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, -2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, -1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, -1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, - 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, -2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, -2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, -3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, -4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, -3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, - 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, -3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, -2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, -1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, - 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, - 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, -3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, -4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, -2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, -1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, -1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, - 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, -1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, -3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, - 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, - 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, -1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, - 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, -1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, - 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, -2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, - 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, -2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, -2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, -1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, -1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, -2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, - 819,1541, 142,2284, 
44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, -1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, -1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, -2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, -2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, -3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, -1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, -4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, - 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, - 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, -3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, -1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, - 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, -3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, -1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, -4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, -1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, -2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, -1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, - 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, -1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, -3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, - 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, -2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, - 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, -1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, -1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, -1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, -3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, -2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, -3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, -3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, -3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, - 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, -2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, - 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, -2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, - 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, -1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, - 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, - 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, -1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, -3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, -3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 
63,2076, 314,1881, -1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, -1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, -3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, -2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, -2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, -1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, -3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, - 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, -4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, -1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, -2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, -3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, -3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, -1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, - 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, - 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, -2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, - 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, -1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, - 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, -1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, -1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, -1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, -1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, -1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, - 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, - 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512 -) - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/gb2312prober.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/gb2312prober.py deleted file mode 100644 index 8446d2d..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/gb2312prober.py +++ /dev/null @@ -1,46 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import GB2312DistributionAnalysis -from .mbcssm import GB2312_SM_MODEL - -class GB2312Prober(MultiByteCharSetProber): - def __init__(self): - super(GB2312Prober, self).__init__() - self.coding_sm = CodingStateMachine(GB2312_SM_MODEL) - self.distribution_analyzer = GB2312DistributionAnalysis() - self.reset() - - @property - def charset_name(self): - return "GB2312" - - @property - def language(self): - return "Chinese" diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/hebrewprober.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/hebrewprober.py deleted file mode 100644 index b0e1bf4..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/hebrewprober.py +++ /dev/null @@ -1,292 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Shy Shalom -# Portions created by the Initial Developer are Copyright (C) 2005 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import ProbingState - -# This prober doesn't actually recognize a language or a charset. -# It is a helper prober for the use of the Hebrew model probers - -### General ideas of the Hebrew charset recognition ### -# -# Four main charsets exist in Hebrew: -# "ISO-8859-8" - Visual Hebrew -# "windows-1255" - Logical Hebrew -# "ISO-8859-8-I" - Logical Hebrew -# "x-mac-hebrew" - ?? Logical Hebrew ?? -# -# Both "ISO" charsets use a completely identical set of code points, whereas -# "windows-1255" and "x-mac-hebrew" are two different proper supersets of -# these code points. windows-1255 defines additional characters in the range -# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific -# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6. -# x-mac-hebrew defines similar additional code points but with a different -# mapping. -# -# As far as an average Hebrew text with no diacritics is concerned, all four -# charsets are identical with respect to code points. Meaning that for the -# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters -# (including final letters). 
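# Illustrative check (an added sketch, not part of the deleted file): the
# "identical code points" claim above can be confirmed with Python's built-in
# codecs, which cover two of the four charsets (windows-1255 as "cp1255" and
# ISO-8859-8 as "iso8859_8").
hebrew_letters = "".join(chr(cp) for cp in range(0x05D0, 0x05EB))  # 27 letters, finals included
assert hebrew_letters.encode("cp1255") == hebrew_letters.encode("iso8859_8")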
-# -# The dominant difference between these charsets is their directionality. -# "Visual" directionality means that the text is ordered as if the renderer is -# not aware of a BIDI rendering algorithm. The renderer sees the text and -# draws it from left to right. The text itself when ordered naturally is read -# backwards. A buffer of Visual Hebrew generally looks like so: -# "[last word of first line spelled backwards] [whole line ordered backwards -# and spelled backwards] [first word of first line spelled backwards] -# [end of line] [last word of second line] ... etc' " -# adding punctuation marks, numbers and English text to visual text is -# naturally also "visual" and from left to right. -# -# "Logical" directionality means the text is ordered "naturally" according to -# the order it is read. It is the responsibility of the renderer to display -# the text from right to left. A BIDI algorithm is used to place general -# punctuation marks, numbers and English text in the text. -# -# Texts in x-mac-hebrew are almost impossible to find on the Internet. From -# what little evidence I could find, it seems that its general directionality -# is Logical. -# -# To sum up all of the above, the Hebrew probing mechanism knows about two -# charsets: -# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are -# backwards while line order is natural. For charset recognition purposes -# the line order is unimportant (In fact, for this implementation, even -# word order is unimportant). -# Logical Hebrew - "windows-1255" - normal, naturally ordered text. -# -# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be -# specifically identified. -# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew -# that contain special punctuation marks or diacritics is displayed with -# some unconverted characters showing as question marks. This problem might -# be corrected using another model prober for x-mac-hebrew. Due to the fact -# that x-mac-hebrew texts are so rare, writing another model prober isn't -# worth the effort and performance hit. -# -#### The Prober #### -# -# The prober is divided between two SBCharSetProbers and a HebrewProber, -# all of which are managed, created, fed data, inquired and deleted by the -# SBCSGroupProber. The two SBCharSetProbers identify that the text is in -# fact some kind of Hebrew, Logical or Visual. The final decision about which -# one is it is made by the HebrewProber by combining final-letter scores -# with the scores of the two SBCharSetProbers to produce a final answer. -# -# The SBCSGroupProber is responsible for stripping the original text of HTML -# tags, English characters, numbers, low-ASCII punctuation characters, spaces -# and new lines. It reduces any sequence of such characters to a single space. -# The buffer fed to each prober in the SBCS group prober is pure text in -# high-ASCII. -# The two SBCharSetProbers (model probers) share the same language model: -# Win1255Model. -# The first SBCharSetProber uses the model normally as any other -# SBCharSetProber does, to recognize windows-1255, upon which this model was -# built. The second SBCharSetProber is told to make the pair-of-letter -# lookup in the language model backwards. This in practice exactly simulates -# a visual Hebrew model using the windows-1255 logical Hebrew model. -# -# The HebrewProber is not using any language model. All it does is look for -# final-letter evidence suggesting the text is either logical Hebrew or visual -# Hebrew. 
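# A minimal sketch of the wiring described in this block, roughly as the
# SBCSGroupProber sets it up (added for illustration; it is not part of this
# deleted file, and the SingleByteCharSetProber / Win1255HebrewModel import
# locations are assumed from the vendored chardet layout):
from chardet.hebrewprober import HebrewProber
from chardet.langhebrewmodel import Win1255HebrewModel
from chardet.sbcharsetprober import SingleByteCharSetProber

hebrew_prober = HebrewProber()
logical_prober = SingleByteCharSetProber(Win1255HebrewModel, False, hebrew_prober)
visual_prober = SingleByteCharSetProber(Win1255HebrewModel, True, hebrew_prober)  # reversed pair lookup
hebrew_prober.set_model_probers(logical_prober, visual_prober)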
Disjointed from the model probers, the results of the HebrewProber -# alone are meaningless. HebrewProber always returns 0.00 as confidence -# since it never identifies a charset by itself. Instead, the pointer to the -# HebrewProber is passed to the model probers as a helper "Name Prober". -# When the Group prober receives a positive identification from any prober, -# it asks for the name of the charset identified. If the prober queried is a -# Hebrew model prober, the model prober forwards the call to the -# HebrewProber to make the final decision. In the HebrewProber, the -# decision is made according to the final-letters scores maintained and Both -# model probers scores. The answer is returned in the form of the name of the -# charset identified, either "windows-1255" or "ISO-8859-8". - -class HebrewProber(CharSetProber): - # windows-1255 / ISO-8859-8 code points of interest - FINAL_KAF = 0xea - NORMAL_KAF = 0xeb - FINAL_MEM = 0xed - NORMAL_MEM = 0xee - FINAL_NUN = 0xef - NORMAL_NUN = 0xf0 - FINAL_PE = 0xf3 - NORMAL_PE = 0xf4 - FINAL_TSADI = 0xf5 - NORMAL_TSADI = 0xf6 - - # Minimum Visual vs Logical final letter score difference. - # If the difference is below this, don't rely solely on the final letter score - # distance. - MIN_FINAL_CHAR_DISTANCE = 5 - - # Minimum Visual vs Logical model score difference. - # If the difference is below this, don't rely at all on the model score - # distance. - MIN_MODEL_DISTANCE = 0.01 - - VISUAL_HEBREW_NAME = "ISO-8859-8" - LOGICAL_HEBREW_NAME = "windows-1255" - - def __init__(self): - super(HebrewProber, self).__init__() - self._final_char_logical_score = None - self._final_char_visual_score = None - self._prev = None - self._before_prev = None - self._logical_prober = None - self._visual_prober = None - self.reset() - - def reset(self): - self._final_char_logical_score = 0 - self._final_char_visual_score = 0 - # The two last characters seen in the previous buffer, - # mPrev and mBeforePrev are initialized to space in order to simulate - # a word delimiter at the beginning of the data - self._prev = ' ' - self._before_prev = ' ' - # These probers are owned by the group prober. - - def set_model_probers(self, logicalProber, visualProber): - self._logical_prober = logicalProber - self._visual_prober = visualProber - - def is_final(self, c): - return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN, - self.FINAL_PE, self.FINAL_TSADI] - - def is_non_final(self, c): - # The normal Tsadi is not a good Non-Final letter due to words like - # 'lechotet' (to chat) containing an apostrophe after the tsadi. This - # apostrophe is converted to a space in FilterWithoutEnglishLetters - # causing the Non-Final tsadi to appear at an end of a word even - # though this is not the case in the original text. - # The letters Pe and Kaf rarely display a related behavior of not being - # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak' - # for example legally end with a Non-Final Pe or Kaf. However, the - # benefit of these letters as Non-Final letters outweighs the damage - # since these words are quite rare. - return c in [self.NORMAL_KAF, self.NORMAL_MEM, - self.NORMAL_NUN, self.NORMAL_PE] - - def feed(self, byte_str): - # Final letter analysis for logical-visual decision. - # Look for evidence that the received buffer is either logical Hebrew - # or visual Hebrew. - # The following cases are checked: - # 1) A word longer than 1 letter, ending with a final letter. 
This is - # an indication that the text is laid out "naturally" since the - # final letter really appears at the end. +1 for logical score. - # 2) A word longer than 1 letter, ending with a Non-Final letter. In - # normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi, - # should not end with the Non-Final form of that letter. Exceptions - # to this rule are mentioned above in isNonFinal(). This is an - # indication that the text is laid out backwards. +1 for visual - # score - # 3) A word longer than 1 letter, starting with a final letter. Final - # letters should not appear at the beginning of a word. This is an - # indication that the text is laid out backwards. +1 for visual - # score. - # - # The visual score and logical score are accumulated throughout the - # text and are finally checked against each other in GetCharSetName(). - # No checking for final letters in the middle of words is done since - # that case is not an indication for either Logical or Visual text. - # - # We automatically filter out all 7-bit characters (replace them with - # spaces) so the word boundary detection works properly. [MAP] - - if self.state == ProbingState.NOT_ME: - # Both model probers say it's not them. No reason to continue. - return ProbingState.NOT_ME - - byte_str = self.filter_high_byte_only(byte_str) - - for cur in byte_str: - if cur == ' ': - # We stand on a space - a word just ended - if self._before_prev != ' ': - # next-to-last char was not a space so self._prev is not a - # 1 letter word - if self.is_final(self._prev): - # case (1) [-2:not space][-1:final letter][cur:space] - self._final_char_logical_score += 1 - elif self.is_non_final(self._prev): - # case (2) [-2:not space][-1:Non-Final letter][ - # cur:space] - self._final_char_visual_score += 1 - else: - # Not standing on a space - if ((self._before_prev == ' ') and - (self.is_final(self._prev)) and (cur != ' ')): - # case (3) [-2:space][-1:final letter][cur:not space] - self._final_char_visual_score += 1 - self._before_prev = self._prev - self._prev = cur - - # Forever detecting, till the end or until both model probers return - # ProbingState.NOT_ME (handled above) - return ProbingState.DETECTING - - @property - def charset_name(self): - # Make the decision: is it Logical or Visual? - # If the final letter score distance is dominant enough, rely on it. - finalsub = self._final_char_logical_score - self._final_char_visual_score - if finalsub >= self.MIN_FINAL_CHAR_DISTANCE: - return self.LOGICAL_HEBREW_NAME - if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE: - return self.VISUAL_HEBREW_NAME - - # It's not dominant enough, try to rely on the model scores instead. - modelsub = (self._logical_prober.get_confidence() - - self._visual_prober.get_confidence()) - if modelsub > self.MIN_MODEL_DISTANCE: - return self.LOGICAL_HEBREW_NAME - if modelsub < -self.MIN_MODEL_DISTANCE: - return self.VISUAL_HEBREW_NAME - - # Still no good, back to final letter distance, maybe it'll save the - # day. - if finalsub < 0.0: - return self.VISUAL_HEBREW_NAME - - # (finalsub > 0 - Logical) or (don't know what to do) default to - # Logical. - return self.LOGICAL_HEBREW_NAME - - @property - def language(self): - return 'Hebrew' - - @property - def state(self): - # Remain active as long as any of the model probers are active. 
- if (self._logical_prober.state == ProbingState.NOT_ME) and \ - (self._visual_prober.state == ProbingState.NOT_ME): - return ProbingState.NOT_ME - return ProbingState.DETECTING diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/jisfreq.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/jisfreq.py deleted file mode 100644 index 83fc082..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/jisfreq.py +++ /dev/null @@ -1,325 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# Sampling from about 20M text materials include literature and computer technology -# -# Japanese frequency table, applied to both S-JIS and EUC-JP -# They are sorted in order. 
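# Illustrative arithmetic (an added sketch, not part of the deleted file):
# reading "512 --> 0.92635" in the header below as "the 512 most frequent
# characters cover 92.635% of the sample", the ratios it quotes work out as:
coverage_512 = 0.92635
ideal_distribution_ratio = coverage_512 / (1 - coverage_512)    # ~12.58, as quoted
random_distribution_ratio = 512 / (2965 + 62 + 83 + 86 - 512)   # ~0.191, as quoted
# JIS_TYPICAL_DISTRIBUTION_RATIO below is then set to 3.0, roughly a quarter
# of the ideal ratio (12.58 * 0.25 ~= 3.1).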
- -# 128 --> 0.77094 -# 256 --> 0.85710 -# 512 --> 0.92635 -# 1024 --> 0.97130 -# 2048 --> 0.99431 -# -# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58 -# Random Distribution Ration = 512 / (2965+62+83+86-512) = 0.191 -# -# Typical Distribution Ratio, 25% of IDR - -JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0 - -# Char to FreqOrder table , -JIS_TABLE_SIZE = 4368 - -JIS_CHAR_TO_FREQ_ORDER = ( - 40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16 -3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32 -1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48 -2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64 -2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80 -5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96 -1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112 -5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128 -5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144 -5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160 -5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176 -5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192 -5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208 -1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224 -1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240 -1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256 -2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272 -3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288 -3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304 - 4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320 - 12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336 -1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352 - 109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368 -5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384 - 271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400 - 32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416 - 43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432 - 280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448 - 54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464 -5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480 -5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496 -5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512 -4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528 -5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544 -5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560 -5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576 -5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592 
-5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608 -5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624 -5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640 -5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656 -5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672 -3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688 -5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704 -5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720 -5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736 -5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752 -5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768 -5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784 -5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800 -5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816 -5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832 -5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848 -5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864 -5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880 -5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896 -5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912 -5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928 -5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944 -5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960 -5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976 -5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992 -5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008 -5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024 -5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040 -5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056 -5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072 -5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088 -5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104 -5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120 -5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136 -5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152 -5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168 -5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184 -5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200 -5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216 -5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232 
-5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248 -5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264 -5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280 -5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296 -6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312 -6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328 -6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344 -6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360 -6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376 -6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392 -6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408 -6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424 -4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440 - 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456 - 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472 -1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488 -1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504 - 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520 -3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536 -3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552 - 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568 -3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584 -3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600 - 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616 -2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632 - 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648 -3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664 -1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680 - 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696 -1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712 - 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728 -2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744 -2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760 -2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776 -2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792 -1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808 -1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824 -1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840 -1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856 -2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 
1872 -1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888 -2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904 -1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920 -1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936 -1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952 -1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968 -1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984 -1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000 - 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016 - 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032 -1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048 -2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064 -2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080 -2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096 -3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112 -3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128 - 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144 -3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160 -1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176 - 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192 -2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208 -1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224 - 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240 -3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256 -4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272 -2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288 -1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304 -2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320 -1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336 - 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352 - 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368 -1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384 -2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400 -2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416 -2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432 -3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448 -1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464 -2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480 - 359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496 - 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, 
# 2512 - 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528 -1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544 -2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560 - 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576 -1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592 -1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608 - 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624 -1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640 -1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656 -1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672 - 764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688 -2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704 - 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720 -2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736 -3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752 -2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768 -1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784 -6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800 -1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816 -2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832 -1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848 - 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864 - 72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880 -3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896 -3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912 -1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928 -1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944 -1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960 -1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976 - 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992 - 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008 -2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024 - 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040 -3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056 -2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072 - 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088 -1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104 -2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120 - 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136 -1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, 
# 3152 - 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168 -4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184 -2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200 -1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216 - 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232 -1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248 -2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264 - 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280 -6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296 -1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312 -1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328 -2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344 -3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360 - 914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376 -3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392 -1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408 - 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424 -1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440 - 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456 -3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472 - 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488 -2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504 - 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520 -4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536 -2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552 -1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568 -1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584 -1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600 - 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616 -1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632 -3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648 -1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664 -3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680 - 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696 - 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712 - 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728 -2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744 -1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760 - 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776 -1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 
904,3618,3537, # 3792 - 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808 -1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824 - 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840 - 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856 - 480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872 -1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888 -1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904 -2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920 -4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936 - 227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952 -1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968 - 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984 -1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000 -3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016 -1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032 -2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048 -2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064 -1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080 -1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096 -2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112 - 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128 -2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144 -1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160 -1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176 -1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192 -1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208 -3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224 -2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240 -2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256 - 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272 -3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288 -3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304 -1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320 -2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336 -1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352 -2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512 -) - - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/jpcntx.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/jpcntx.py deleted file mode 100644 index 20044e4..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/jpcntx.py +++ /dev/null @@ -1,233 +0,0 @@ 
-######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - - -# This is hiragana 2-char sequence table, the number in each cell represents its frequency category -jp2CharContext = ( -(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1), -(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4), -(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2), -(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4), -(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4), -(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3), -(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3), 
-(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3), -(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4), -(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3), -(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4), -(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3), -(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5), -(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3), -(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5), -(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4), -(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4), -(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3), -(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3), -(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3), -(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5), -(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4), -(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5), -(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3), -(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4), -(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4), -(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4), 
-(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1), -(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0), -(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3), -(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0), -(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3), -(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3), -(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5), -(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4), -(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5), -(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3), -(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3), -(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3), -(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3), -(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4), -(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4), -(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2), -(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3), -(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3), -(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3), -(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3), 
-(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4), -(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3), -(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4), -(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3), -(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3), -(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4), -(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4), -(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3), -(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4), -(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4), -(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3), -(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4), -(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4), -(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4), -(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3), -(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2), -(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2), -(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3), -(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3), -(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5), 
-(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3), -(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4), -(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4), -(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0), -(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3), -(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1), -(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2), -(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3), -(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1), -) - -class JapaneseContextAnalysis(object): - NUM_OF_CATEGORY = 6 - DONT_KNOW = -1 - ENOUGH_REL_THRESHOLD = 100 - MAX_REL_THRESHOLD = 1000 - MINIMUM_DATA_THRESHOLD = 4 - - def __init__(self): - self._total_rel = None - self._rel_sample = None - self._need_to_skip_char_num = None - self._last_char_order = None - self._done = None - self.reset() - - def reset(self): - self._total_rel = 0 # total sequence received - # category counters, each integer counts sequence in its category - self._rel_sample = [0] * self.NUM_OF_CATEGORY - # if last byte in current buffer is not the last byte of a character, - # we need to know how many bytes to skip in next buffer - self._need_to_skip_char_num = 0 - self._last_char_order = -1 # The order of previous char - # If this flag is set to True, detection is done and conclusion has - # been made - self._done = False - - def feed(self, byte_str, num_bytes): - if self._done: - return - - # The buffer we got is byte oriented, and a character may span in more than one - # buffers. In case the last one or two byte in last buffer is not - # complete, we record how many byte needed to complete that character - # and skip these bytes here. We can choose to record those bytes as - # well and analyse the character once it is complete, but since a - # character will not make much difference, by simply skipping - # this character will simply our logic and improve performance. 
- i = self._need_to_skip_char_num - while i < num_bytes: - order, char_len = self.get_order(byte_str[i:i + 2]) - i += char_len - if i > num_bytes: - self._need_to_skip_char_num = i - num_bytes - self._last_char_order = -1 - else: - if (order != -1) and (self._last_char_order != -1): - self._total_rel += 1 - if self._total_rel > self.MAX_REL_THRESHOLD: - self._done = True - break - self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1 - self._last_char_order = order - - def got_enough_data(self): - return self._total_rel > self.ENOUGH_REL_THRESHOLD - - def get_confidence(self): - # This is just one way to calculate confidence. It works well for me. - if self._total_rel > self.MINIMUM_DATA_THRESHOLD: - return (self._total_rel - self._rel_sample[0]) / self._total_rel - else: - return self.DONT_KNOW - - def get_order(self, byte_str): - return -1, 1 - -class SJISContextAnalysis(JapaneseContextAnalysis): - def __init__(self): - super(SJISContextAnalysis, self).__init__() - self._charset_name = "SHIFT_JIS" - - @property - def charset_name(self): - return self._charset_name - - def get_order(self, byte_str): - if not byte_str: - return -1, 1 - # find out current char's byte length - first_char = byte_str[0] - if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC): - char_len = 2 - if (first_char == 0x87) or (0xFA <= first_char <= 0xFC): - self._charset_name = "CP932" - else: - char_len = 1 - - # return its order if it is hiragana - if len(byte_str) > 1: - second_char = byte_str[1] - if (first_char == 202) and (0x9F <= second_char <= 0xF1): - return second_char - 0x9F, char_len - - return -1, char_len - -class EUCJPContextAnalysis(JapaneseContextAnalysis): - def get_order(self, byte_str): - if not byte_str: - return -1, 1 - # find out current char's byte length - first_char = byte_str[0] - if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE): - char_len = 2 - elif first_char == 0x8F: - char_len = 3 - else: - char_len = 1 - - # return its order if it is hiragana - if len(byte_str) > 1: - second_char = byte_str[1] - if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3): - return second_char - 0xA1, char_len - - return -1, char_len - - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/langbulgarianmodel.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/langbulgarianmodel.py deleted file mode 100644 index 2aa4fb2..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/langbulgarianmodel.py +++ /dev/null @@ -1,228 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -# this table is modified base on win1251BulgarianCharToOrderMap, so -# only number <64 is sure valid - -Latin5_BulgarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 -110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 -253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 -116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 -194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80 -210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90 - 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0 - 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0 - 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0 - 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0 - 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0 - 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0 -) - -win1251BulgarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 -110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 -253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 -116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 -206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80 -221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90 - 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0 - 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0 - 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0 - 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0 - 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0 - 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0 -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 96.9392% -# first 1024 sequences:3.0618% -# rest sequences: 0.2992% -# negative sequences: 0.0020% -BulgarianLangModel = ( -0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2, -3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1, -0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0, -0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 
-3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0, -0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0, -0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0, -0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3, -2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1, -3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0, 
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, -3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2, -1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0, -3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1, -1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0, -2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2, -2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0, -3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2, -1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0, -2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2, -2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0, -3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2, -1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0, -2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2, -2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0, -2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2, -1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0, -2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2, -1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0, -3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2, -1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0, -3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1, -1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0, -2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1, -1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0, -2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2, -1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0, -2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1, -1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, -1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2, -1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1, -2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2, -1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0, -2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2, -1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1, -0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2, -1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1, -1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0, -1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1, -0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1, -0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, -0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 
-2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0, -1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, -0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, -1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1, -1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, -1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -) - -Latin5BulgarianModel = { - 'char_to_order_map': Latin5_BulgarianCharToOrderMap, - 'precedence_matrix': BulgarianLangModel, - 'typical_positive_ratio': 0.969392, - 'keep_english_letter': False, - 'charset_name': "ISO-8859-5", - 'language': 'Bulgairan', -} - -Win1251BulgarianModel = { - 'char_to_order_map': win1251BulgarianCharToOrderMap, - 'precedence_matrix': BulgarianLangModel, - 'typical_positive_ratio': 0.969392, - 'keep_english_letter': False, - 'charset_name': "windows-1251", - 'language': 'Bulgarian', -} diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/langcyrillicmodel.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/langcyrillicmodel.py deleted file mode 100644 index e5f9a1f..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/langcyrillicmodel.py +++ /dev/null @@ -1,333 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# KOI8-R language model -# Character Mapping Table: -KOI8R_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80 -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90 -223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237, # a0 -238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0 - 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0 - 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0 - 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0 - 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0 -) - -win1251_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, -239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253, - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, -) - -latin5_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 
43, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, -239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, -) - -macCyrillic_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, -239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255, -) - -IBM855_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 -191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205, -206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70, - 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219, -220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229, -230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243, - 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248, - 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249, -250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255, -) - -IBM866_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 -155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 -253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 - 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 - 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, - 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, - 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, -191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, -207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, -223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, - 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, -239, 
68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 97.6601% -# first 1024 sequences: 2.3389% -# rest sequences: 0.1237% -# negative sequences: 0.0009% -RussianLangModel = ( -0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2, -3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, -0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, -0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0, -0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1, -1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1, -1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0, -2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1, -1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0, -3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1, -1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0, -2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2, -1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1, -1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1, -1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, -2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1, -1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0, -3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2, -1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1, -2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1, -1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0, -2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0, -0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1, -1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0, -1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1, -1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0, -3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1, -2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1, -3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1, -1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1, -1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1, -0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0, -2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1, -1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0, -1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1, -0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1, -1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2, -2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1, -1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0, -1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0, -2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0, -1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1, 
-0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, -2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1, -1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1, -1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0, -0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1, -0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1, -0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, -1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1, -0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0, -0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, -1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1, -0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, -2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0, -0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, -) - -Koi8rModel = { - 'char_to_order_map': KOI8R_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "KOI8-R", - 'language': 'Russian', -} - -Win1251CyrillicModel = { - 'char_to_order_map': win1251_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "windows-1251", - 'language': 'Russian', -} - -Latin5CyrillicModel = { - 'char_to_order_map': latin5_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "ISO-8859-5", - 'language': 'Russian', -} - -MacCyrillicModel = { - 'char_to_order_map': macCyrillic_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "MacCyrillic", - 'language': 'Russian', -} - -Ibm866Model = { - 'char_to_order_map': IBM866_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "IBM866", - 'language': 'Russian', -} - -Ibm855Model = { - 'char_to_order_map': IBM855_char_to_order_map, - 'precedence_matrix': RussianLangModel, - 'typical_positive_ratio': 0.976601, - 'keep_english_letter': False, - 'charset_name': "IBM855", - 'language': 'Russian', -} diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/langgreekmodel.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/langgreekmodel.py deleted file mode 100644 index 5332221..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/langgreekmodel.py +++ /dev/null @@ -1,225 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. 
-# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -Latin7_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 - 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 -253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 - 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 -253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 -253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123, # b0 -110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 - 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 -124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 - 9, 8, 14, 7, 2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253, # f0 -) - -win1253_char_to_order_map = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85, # 40 - 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253, # 50 -253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55, # 60 - 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253, # 70 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 80 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 90 -253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253, # a0 -253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123, # b0 -110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39, # c0 - 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15, # d0 -124, 1, 29, 20, 21, 3, 32, 13, 25, 5, 11, 16, 10, 6, 30, 4, # e0 - 9, 8, 14, 7, 2, 12, 28, 23, 42, 
24, 64, 75, 19, 26, 27,253, # f0 -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 98.2851% -# first 1024 sequences:1.7001% -# rest sequences: 0.0359% -# negative sequences: 0.0148% -GreekLangModel = ( -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0, -3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0, -2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0, -0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0, -2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0, -2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0, -0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0, -2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0, -0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0, -3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0, -3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0, -2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0, -2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0, -0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0, -0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0, -0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2, -0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0, -0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2, -0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0, -0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2, -0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2, -0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0, -0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2, -0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0, -0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0, -0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0, -0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0, -0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2, -0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0, -0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2, -0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2, -0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2, -0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0, -0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1, -0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2, 
-0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, -0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2, -0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2, -0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0, -0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0, -0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1, -0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0, -0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0, -0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -) - -Latin7GreekModel = { - 'char_to_order_map': Latin7_char_to_order_map, - 'precedence_matrix': GreekLangModel, - 'typical_positive_ratio': 0.982851, - 'keep_english_letter': False, - 'charset_name': "ISO-8859-7", - 'language': 'Greek', -} - -Win1253GreekModel = { - 'char_to_order_map': win1253_char_to_order_map, - 'precedence_matrix': GreekLangModel, - 'typical_positive_ratio': 0.982851, - 'keep_english_letter': False, - 'charset_name': "windows-1253", - 'language': 'Greek', -} diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/langhebrewmodel.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/langhebrewmodel.py deleted file mode 100644 index 58f4c87..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/langhebrewmodel.py +++ /dev/null @@ -1,200 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Simon Montagu -# Portions created by the Initial Developer are Copyright (C) 2005 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# Shoshannah Forbes - original C code (?) -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Windows-1255 language model -# Character Mapping Table: -WIN1255_CHAR_TO_ORDER_MAP = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40 - 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50 -253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60 - 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70 -124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214, -215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221, - 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227, -106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234, - 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237, -238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250, - 9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23, - 12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 98.4004% -# first 1024 sequences: 1.5981% -# rest sequences: 0.087% -# negative sequences: 0.0015% -HEBREW_LANG_MODEL = ( -0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0, -3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2, -1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2, -1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3, -1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2, -1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2, -1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2, -0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2, -0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2, -1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0, -3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2, -0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1, -0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0, -0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2, -0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 
-3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2, -0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0, -3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2, -0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2, -0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2, -0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2, -0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1, -0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2, -0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0, -3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2, -0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2, -0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2, -0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0, -1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2, -0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0, -3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0, -0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3, -0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0, -0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0, -0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, -0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0, -2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0, -0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1, -0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2, -0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0, -0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1, -1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1, -0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1, -2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1, -1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1, -2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1, -1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1, -2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0, -0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1, -1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1, -0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0, -) - -Win1255HebrewModel = { - 'char_to_order_map': WIN1255_CHAR_TO_ORDER_MAP, - 'precedence_matrix': HEBREW_LANG_MODEL, - 'typical_positive_ratio': 0.984004, - 'keep_english_letter': False, - 'charset_name': "windows-1255", - 'language': 'Hebrew', -} diff --git 
a/.tox/py37-normal/lib/python3.7/site-packages/chardet/langhungarianmodel.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/langhungarianmodel.py deleted file mode 100644 index bb7c095..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/langhungarianmodel.py +++ /dev/null @@ -1,225 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -Latin2_HungarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, - 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, -253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, - 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, -159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174, -175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190, -191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205, - 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, -221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231, -232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241, - 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85, -245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253, -) - -win1250HungarianCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47, - 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253, -253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8, - 23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253, -161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176, -177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190, 
-191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205, - 81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220, -221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231, -232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241, - 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87, -245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 94.7368% -# first 1024 sequences:5.2623% -# rest sequences: 0.8894% -# negative sequences: 0.0009% -HungarianLangModel = ( -0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3, -3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2, -3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0, -3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3, -0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2, -0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0, -3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2, -0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0, 
-3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1, -0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0, -3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0, -1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0, -1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0, -1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1, -3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1, -2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1, -2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1, -2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1, -2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0, -2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, -3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1, -2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1, -2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1, -2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1, -1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1, -1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1, -3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0, -1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1, -1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1, -2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1, -2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0, -2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1, -3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1, -2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1, -1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0, -1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0, -2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1, -2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1, -1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0, -1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1, -2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0, -1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0, -1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0, 
-2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1, -2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1, -2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1, -1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1, -1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1, -1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0, -0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0, -2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1, -2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1, -1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1, -2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1, -1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0, -1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0, -2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0, -2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1, -2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0, -1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0, -2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0, -0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0, -0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, -0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0, -2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0, -0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0, -) - -Latin2HungarianModel = { - 'char_to_order_map': Latin2_HungarianCharToOrderMap, - 'precedence_matrix': HungarianLangModel, - 'typical_positive_ratio': 0.947368, - 'keep_english_letter': True, - 'charset_name': "ISO-8859-2", - 'language': 'Hungarian', -} - -Win1250HungarianModel = { - 'char_to_order_map': win1250HungarianCharToOrderMap, - 'precedence_matrix': HungarianLangModel, - 'typical_positive_ratio': 0.947368, - 'keep_english_letter': True, - 'charset_name': "windows-1250", - 'language': 'Hungarian', -} diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/langthaimodel.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/langthaimodel.py deleted file mode 100644 index 15f94c2..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/langthaimodel.py +++ /dev/null @@ -1,199 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
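
Note on the single-byte models deleted above (Win1255HebrewModel, Latin2HungarianModel, Win1250HungarianModel): they all share the same two-part shape — a 256-entry char_to_order_map that converts a raw byte into a frequency rank, and a flattened precedence_matrix of 0–3 likelihood scores for pairs of consecutive letter ranks. The loop below is only a rough sketch of how such a model can be consumed; the SAMPLE_SIZE and SYMBOL_CAT_ORDER constants, the bigram_score name, and the final score formula are assumptions for illustration and do not reproduce chardet's actual SingleByteCharSetProber.

# Illustrative sketch only -- not chardet's SingleByteCharSetProber.
SAMPLE_SIZE = 64          # assumed logical width of the flattened precedence_matrix
SYMBOL_CAT_ORDER = 250    # assumed cutoff: ranks >= 250 are control/digits/symbols, not letters

def bigram_score(byte_str, model):
    """Tally letter-pair likelihood categories for a bytes object against one model dict."""
    order_map = model['char_to_order_map']
    matrix = model['precedence_matrix']
    hits = [0, 0, 0, 0]           # counts per likelihood category 0..3
    last_order = None
    for byte in byte_str:
        order = order_map[byte]
        if order >= SYMBOL_CAT_ORDER:
            last_order = None     # symbols/digits break the letter sequence
            continue
        if last_order is not None and last_order < SAMPLE_SIZE and order < SAMPLE_SIZE:
            hits[matrix[last_order * SAMPLE_SIZE + order]] += 1
        last_order = order
    total = sum(hits)
    return (hits[3] + hits[2]) / total if total else 0.0

A higher score means the byte stream's letter-pair statistics resemble the model's training language; the real prober additionally folds typical_positive_ratio into its confidence estimate rather than using a raw ratio like this.
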
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# The following result for thai was collected from a limited sample (1M). - -# Character Mapping Table: -TIS620CharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 -253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 -252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 -253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40 -188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50 -253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60 - 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70 -209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222, -223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235, -236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57, - 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54, - 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63, - 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244, - 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247, - 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253, -) - -# Model Table: -# total sequences: 100% -# first 512 sequences: 92.6386% -# first 1024 sequences:7.3177% -# rest sequences: 1.0230% -# negative sequences: 0.0436% -ThaiLangModel = ( -0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3, -0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2, -3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3, -0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1, -3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2, -3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1, -3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2, -3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1, -3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1, -3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0, -3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1, -2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1, -3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1, -0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0, -3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1, -0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0, -3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2, -1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0, -3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3, -3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0, -1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2, 
-0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0, -2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3, -0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0, -3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1, -2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0, -3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2, -0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2, -3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, -3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0, -2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2, -3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1, -2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1, -3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1, -3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0, -3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1, -3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1, -3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1, -1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2, -0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3, -0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1, -3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0, -3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1, -1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0, -3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1, -3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2, -0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0, -0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0, -1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1, -1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1, -3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1, -0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, -0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, -3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0, -3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0, -0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1, -0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0, -0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1, -0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1, -0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0, -0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1, -0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0, -3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0, -0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0, -0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0, -3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1, -2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1, -0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0, -3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0, -1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0, -1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, -1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0, -1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -) - -TIS620ThaiModel = { - 'char_to_order_map': TIS620CharToOrderMap, - 'precedence_matrix': ThaiLangModel, - 'typical_positive_ratio': 0.926386, - 'keep_english_letter': False, - 'charset_name': "TIS-620", - 'language': 'Thai', -} diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/langturkishmodel.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/langturkishmodel.py deleted file mode 100644 index a427a45..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/langturkishmodel.py +++ /dev/null @@ -1,193 +0,0 @@ -# -*- coding: utf-8 -*- -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Communicator client code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Özgür Baskın - Turkish Language Model -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -# 255: Control characters that usually does not exist in any text -# 254: Carriage/Return -# 253: symbol (punctuation) that does not belong to word -# 252: 0 - 9 - -# Character Mapping Table: -Latin5_TurkishCharToOrderMap = ( -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, -255, 23, 37, 47, 39, 29, 52, 36, 45, 53, 60, 16, 49, 20, 46, 42, - 48, 69, 44, 35, 31, 51, 38, 62, 65, 43, 56,255,255,255,255,255, -255, 1, 21, 28, 12, 2, 18, 27, 25, 3, 24, 10, 5, 13, 4, 15, - 26, 64, 7, 8, 9, 14, 32, 57, 58, 11, 22,255,255,255,255,255, -180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165, -164,163,162,161,160,159,101,158,157,156,155,154,153,152,151,106, -150,149,148,147,146,145,144,100,143,142,141,140,139,138,137,136, - 94, 80, 93,135,105,134,133, 63,132,131,130,129,128,127,126,125, -124,104, 73, 99, 79, 85,123, 54,122, 98, 92,121,120, 91,103,119, - 68,118,117, 97,116,115, 50, 90,114,113,112,111, 55, 41, 40, 86, - 89, 70, 59, 78, 71, 82, 88, 33, 77, 66, 84, 83,110, 75, 61, 96, - 30, 67,109, 74, 87,102, 34, 95, 81,108, 76, 72, 17, 6, 19,107, -) - -TurkishLangModel = ( -3,2,3,3,3,1,3,3,3,3,3,3,3,3,2,1,1,3,3,1,3,3,0,3,3,3,3,3,0,3,1,3, -3,2,1,0,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, -3,2,2,3,3,0,3,3,3,3,3,3,3,2,3,1,0,3,3,1,3,3,0,3,3,3,3,3,0,3,0,3, -3,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,0,1,0,1, -3,3,2,3,3,0,3,3,3,3,3,3,3,2,3,1,1,3,3,0,3,3,1,2,3,3,3,3,0,3,0,3, 
-3,1,1,0,0,0,1,0,0,0,0,1,1,0,1,2,1,0,0,0,1,0,0,0,0,2,0,0,0,0,0,1, -3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,1,3,3,2,0,3,2,1,2,2,1,3,3,0,0,0,2, -2,2,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,0,0,1, -3,3,3,2,3,3,1,2,3,3,3,3,3,3,3,1,3,2,1,0,3,2,0,1,2,3,3,2,1,0,0,2, -2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0, -1,0,1,3,3,1,3,3,3,3,3,3,3,1,2,0,0,2,3,0,2,3,0,0,2,2,2,3,0,3,0,1, -2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,0,3,2,0,2,3,2,3,3,1,0,0,2, -3,2,0,0,1,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,2,0,0,1, -3,3,3,2,3,3,2,3,3,3,3,2,3,3,3,0,3,3,0,0,2,1,0,0,2,3,2,2,0,0,0,2, -2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,2,0,0,1, -3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,0,1,3,2,1,1,3,2,3,2,1,0,0,2, -2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0, -3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,2,0,2,3,0,0,2,2,2,2,0,0,0,2, -3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, -3,3,3,3,3,3,3,2,2,2,2,3,2,3,3,0,3,3,1,1,2,2,0,0,2,2,3,2,0,0,1,3, -0,3,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1, -3,3,3,2,3,3,3,2,1,2,2,3,2,3,3,0,3,2,0,0,1,1,0,1,1,2,1,2,0,0,0,1, -0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0, -3,3,3,2,3,3,2,3,2,2,2,3,3,3,3,1,3,1,1,0,3,2,1,1,3,3,2,3,1,0,0,1, -1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,1, -3,2,2,3,3,0,3,3,3,3,3,3,3,2,2,1,0,3,3,1,3,3,0,1,3,3,2,3,0,3,0,3, -2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -2,2,2,3,3,0,3,3,3,3,3,3,3,3,3,0,0,3,2,0,3,3,0,3,2,3,3,3,0,3,1,3, -2,0,0,0,0,0,0,0,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1, -3,3,3,1,2,3,3,1,0,0,1,0,0,3,3,2,3,0,0,2,0,0,2,0,2,0,0,0,2,0,2,0, -0,3,1,0,1,0,0,0,2,2,1,0,1,1,2,1,2,2,2,0,2,1,1,0,0,0,2,0,0,0,0,0, -1,2,1,3,3,0,3,3,3,3,3,2,3,0,0,0,0,2,3,0,2,3,1,0,2,3,1,3,0,3,0,2, -3,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,1,3,3,2,2,3,2,2,0,1,2,3,0,1,2,1,0,1,0,0,0,1,0,2,2,0,0,0,1, -1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0, -3,3,3,1,3,3,1,1,3,3,1,1,3,3,1,0,2,1,2,0,2,1,0,0,1,1,2,1,0,0,0,2, -2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,1,0,2,1,3,0,0,2,0,0,3,3,0,3,0,0,1,0,1,2,0,0,1,1,2,2,0,1,0, -0,1,2,1,1,0,1,0,1,1,1,1,1,0,1,1,1,2,2,1,2,0,1,0,0,0,0,0,0,1,0,0, -3,3,3,2,3,2,3,3,0,2,2,2,3,3,3,0,3,0,0,0,2,2,0,1,2,1,1,1,0,0,0,1, -0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -3,3,3,3,3,3,2,1,2,2,3,3,3,3,2,0,2,0,0,0,2,2,0,0,2,1,3,3,0,0,1,1, -1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0, -1,1,2,3,3,0,3,3,3,3,3,3,2,2,0,2,0,2,3,2,3,2,2,2,2,2,2,2,1,3,2,3, -2,0,2,1,2,2,2,2,1,1,2,2,1,2,2,1,2,0,0,2,1,1,0,2,1,0,0,1,0,0,0,1, -2,3,3,1,1,1,0,1,1,1,2,3,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0, -0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,2,2,3,2,3,2,2,1,3,3,3,0,2,1,2,0,2,1,0,0,1,1,1,1,1,0,0,1, -2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0, -3,3,3,2,3,3,3,3,3,2,3,1,2,3,3,1,2,0,0,0,0,0,0,0,3,2,1,1,0,0,0,0, -2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -3,3,3,2,2,3,3,2,1,1,1,1,1,3,3,0,3,1,0,0,1,1,0,0,3,1,2,1,0,0,0,0, -0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0, -3,3,3,2,2,3,2,2,2,3,2,1,1,3,3,0,3,0,0,0,0,1,0,0,3,1,1,2,0,0,0,1, -1,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, -1,1,1,3,3,0,3,3,3,3,3,2,2,2,1,2,0,2,1,2,2,1,1,0,1,2,2,2,2,2,2,2, -0,0,2,1,2,1,2,1,0,1,1,3,1,2,1,1,2,0,0,2,0,1,0,1,0,1,0,0,0,1,0,1, 
-3,3,3,1,3,3,3,0,1,1,0,2,2,3,1,0,3,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,2,0,0,2,2,1,0,0,1,0,0,3,3,1,3,0,0,1,1,0,2,0,3,0,0,0,2,0,1,1, -0,1,2,0,1,2,2,0,2,2,2,2,1,0,2,1,1,0,2,0,2,1,2,0,0,0,0,0,0,0,0,0, -3,3,3,1,3,2,3,2,0,2,2,2,1,3,2,0,2,1,2,0,1,2,0,0,1,0,2,2,0,0,0,2, -1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0, -3,3,3,0,3,3,1,1,2,3,1,0,3,2,3,0,3,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0, -1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,3,3,0,3,3,2,3,3,2,2,0,0,0,0,1,2,0,1,3,0,0,0,3,1,1,0,3,0,2, -2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,1,2,2,1,0,3,1,1,1,1,3,3,2,3,0,0,1,0,1,2,0,2,2,0,2,2,0,2,1, -0,2,2,1,1,1,1,0,2,1,1,0,1,1,1,1,2,1,2,1,2,0,1,0,1,0,0,0,0,0,0,0, -3,3,3,0,1,1,3,0,0,1,1,0,0,2,2,0,3,0,0,1,1,0,1,0,0,0,0,0,2,0,0,0, -0,3,1,0,1,0,1,0,2,0,0,1,0,1,0,1,1,1,2,1,1,0,2,0,0,0,0,0,0,0,0,0, -3,3,3,0,2,0,2,0,1,1,1,0,0,3,3,0,2,0,0,1,0,0,2,1,1,0,1,0,1,0,1,0, -0,2,0,1,2,0,2,0,2,1,1,0,1,0,2,1,1,0,2,1,1,0,1,0,0,0,1,1,0,0,0,0, -3,2,3,0,1,0,0,0,0,0,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,0,2,0,0,0, -0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,2,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,0,0,2,3,0,0,1,0,1,0,2,3,2,3,0,0,1,3,0,2,1,0,0,0,0,2,0,1,0, -0,2,1,0,0,1,1,0,2,1,0,0,1,0,0,1,1,0,1,1,2,0,1,0,0,0,0,1,0,0,0,0, -3,2,2,0,0,1,1,0,0,0,0,0,0,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,2,0,1,0, -0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,3,3,0,2,3,2,2,1,2,2,1,1,2,0,1,3,2,2,2,0,0,2,2,0,0,0,1,2,1, -3,0,2,1,1,0,1,1,1,0,1,2,2,2,1,1,2,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0, -0,1,1,2,3,0,3,3,3,2,2,2,2,1,0,1,0,1,0,1,2,2,0,0,2,2,1,3,1,1,2,1, -0,0,1,1,2,0,1,1,0,0,1,2,0,2,1,1,2,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0, -3,3,2,0,0,3,1,0,0,0,0,0,0,3,2,1,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, -0,2,1,1,0,0,1,0,1,2,0,0,1,1,0,0,2,1,1,1,1,0,2,0,0,0,0,0,0,0,0,0, -3,3,2,0,0,1,0,0,0,0,1,0,0,3,3,2,2,0,0,1,0,0,2,0,1,0,0,0,2,0,1,0, -0,0,1,1,0,0,2,0,2,1,0,0,1,1,2,1,2,0,2,1,2,1,1,1,0,0,1,1,0,0,0,0, -3,3,2,0,0,2,2,0,0,0,1,1,0,2,2,1,3,1,0,1,0,1,2,0,0,0,0,0,1,0,1,0, -0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,2,0,0,0,1,0,0,1,0,0,2,3,1,2,0,0,1,0,0,2,0,0,0,1,0,2,0,2,0, -0,1,1,2,2,1,2,0,2,1,1,0,0,1,1,0,1,1,1,1,2,1,1,0,0,0,0,0,0,0,0,0, -3,3,3,0,2,1,2,1,0,0,1,1,0,3,3,1,2,0,0,1,0,0,2,0,2,0,1,1,2,0,0,0, -0,0,1,1,1,1,2,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0, -3,3,3,0,2,2,3,2,0,0,1,0,0,2,3,1,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0, -0,1,1,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0, -3,2,3,0,0,0,0,0,0,0,1,0,0,2,2,2,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0, -0,0,2,1,1,0,1,0,2,1,1,0,0,1,1,2,1,0,2,0,2,0,1,0,0,0,2,0,0,0,0,0, -0,0,0,2,2,0,2,1,1,1,1,2,2,0,0,1,0,1,0,0,1,3,0,0,0,0,1,0,0,2,1,0, -0,0,1,0,1,0,0,0,0,0,2,1,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0, -2,0,0,2,3,0,2,3,1,2,2,0,2,0,0,2,0,2,1,1,1,2,1,0,0,1,2,1,1,2,1,0, -1,0,2,0,1,0,1,1,0,0,2,2,1,2,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, -3,3,3,0,2,1,2,0,0,0,1,0,0,3,2,0,1,0,0,1,0,0,2,0,0,0,1,2,1,0,1,0, -0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,2,2,0,2,2,1,1,0,1,1,1,1,1,0,0,1,2,1,1,1,0,1,0,0,0,1,1,1,1, -0,0,2,1,0,1,1,1,0,1,1,2,1,2,1,1,2,0,1,1,2,1,0,2,0,0,0,0,0,0,0,0, -3,2,2,0,0,2,0,0,0,0,0,0,0,2,2,0,2,0,0,1,0,0,2,0,0,0,0,0,2,0,0,0, -0,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,3,2,0,2,2,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0, -2,0,1,0,1,0,1,1,0,0,1,2,0,1,0,1,1,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0, -2,2,2,0,1,1,0,0,0,1,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,1,2,0,1,0, 
-0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,2,1,0,1,1,1,0,0,0,0,1,2,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0, -1,1,2,0,1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1, -0,0,1,2,2,0,2,1,2,1,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -2,2,2,0,0,0,1,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, -0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -2,2,2,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, -) - -Latin5TurkishModel = { - 'char_to_order_map': Latin5_TurkishCharToOrderMap, - 'precedence_matrix': TurkishLangModel, - 'typical_positive_ratio': 0.970290, - 'keep_english_letter': True, - 'charset_name': "ISO-8859-9", - 'language': 'Turkish', -} diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/latin1prober.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/latin1prober.py deleted file mode 100644 index 7d1e8c2..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/latin1prober.py +++ /dev/null @@ -1,145 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
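
One pattern worth noting across the language models in these deleted files: typical_positive_ratio simply restates the "first 512 sequences" figure from each Model Table comment as a fraction — 98.4004% -> 0.984004 (Hebrew), 94.7368% -> 0.947368 (Hungarian), 92.6386% -> 0.926386 (Thai) — and Latin5TurkishModel's 0.970290 appears to follow the same convention. In other words, it records what share of training bigrams fell into the top-512 "positive" bucket, which the single-byte prober presumably uses to normalize the observed positive-sequence share when turning raw counts into a confidence. keep_english_letter marks whether ASCII letters should be retained during filtering (True for the Latin-script Hungarian and Turkish models, False for Hebrew and Thai). A trivial check of the visible correspondences, assuming the model dicts are in scope:

# Percentages from the "Model Table" comments, divided by 100:
assert Win1255HebrewModel['typical_positive_ratio'] == 0.984004    # first 512 sequences: 98.4004%
assert Latin2HungarianModel['typical_positive_ratio'] == 0.947368  # first 512 sequences: 94.7368%
assert TIS620ThaiModel['typical_positive_ratio'] == 0.926386       # first 512 sequences: 92.6386%
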
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import ProbingState - -FREQ_CAT_NUM = 4 - -UDF = 0 # undefined -OTH = 1 # other -ASC = 2 # ascii capital letter -ASS = 3 # ascii small letter -ACV = 4 # accent capital vowel -ACO = 5 # accent capital other -ASV = 6 # accent small vowel -ASO = 7 # accent small other -CLASS_NUM = 8 # total classes - -Latin1_CharToClass = ( - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F - OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 - ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F - ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 - ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F - OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 - ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F - ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 - ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F - OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87 - OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F - UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97 - OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 - OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF - ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7 - ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF - ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7 - ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF - ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7 - ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF - ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7 - ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF -) - -# 0 : illegal -# 1 : very unlikely -# 2 : normal -# 3 : very likely -Latin1ClassModel = ( -# UDF OTH ASC ASS ACV ACO ASV ASO - 0, 0, 0, 0, 0, 0, 0, 0, # UDF - 0, 3, 3, 3, 3, 3, 3, 3, # OTH - 0, 3, 3, 3, 3, 3, 3, 3, # ASC - 0, 3, 3, 3, 1, 1, 3, 3, # ASS - 0, 3, 3, 3, 1, 2, 1, 2, # ACV - 0, 3, 3, 3, 3, 3, 3, 3, # ACO - 0, 3, 1, 3, 1, 1, 1, 3, # ASV - 0, 3, 1, 3, 1, 1, 3, 3, # ASO -) - - -class Latin1Prober(CharSetProber): - def __init__(self): - super(Latin1Prober, self).__init__() - self._last_char_class = None - self._freq_counter = None - self.reset() - - def reset(self): - self._last_char_class = OTH - self._freq_counter = [0] * FREQ_CAT_NUM - CharSetProber.reset(self) - - @property - def charset_name(self): - return "ISO-8859-1" - - @property - def language(self): - return "" - - def feed(self, byte_str): - byte_str = self.filter_with_english_letters(byte_str) - for c in byte_str: - char_class = Latin1_CharToClass[c] - freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM) - + char_class] - if freq == 0: - self._state = ProbingState.NOT_ME - break - self._freq_counter[freq] += 1 - self._last_char_class = char_class - - return self.state - - def get_confidence(self): - if self.state == 
ProbingState.NOT_ME: - return 0.01 - - total = sum(self._freq_counter) - if total < 0.01: - confidence = 0.0 - else: - confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0) - / total) - if confidence < 0.0: - confidence = 0.0 - # lower the confidence of latin1 so that other more accurate - # detector can take priority. - confidence = confidence * 0.73 - return confidence diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/mbcharsetprober.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/mbcharsetprober.py deleted file mode 100644 index 6256ecf..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/mbcharsetprober.py +++ /dev/null @@ -1,91 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# Proofpoint, Inc. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import ProbingState, MachineState - - -class MultiByteCharSetProber(CharSetProber): - """ - MultiByteCharSetProber - """ - - def __init__(self, lang_filter=None): - super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter) - self.distribution_analyzer = None - self.coding_sm = None - self._last_char = [0, 0] - - def reset(self): - super(MultiByteCharSetProber, self).reset() - if self.coding_sm: - self.coding_sm.reset() - if self.distribution_analyzer: - self.distribution_analyzer.reset() - self._last_char = [0, 0] - - @property - def charset_name(self): - raise NotImplementedError - - @property - def language(self): - raise NotImplementedError - - def feed(self, byte_str): - for i in range(len(byte_str)): - coding_state = self.coding_sm.next_state(byte_str[i]) - if coding_state == MachineState.ERROR: - self.logger.debug('%s %s prober hit error at byte %s', - self.charset_name, self.language, i) - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - char_len = self.coding_sm.get_current_charlen() - if i == 0: - self._last_char[1] = byte_str[0] - self.distribution_analyzer.feed(self._last_char, char_len) - else: - self.distribution_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - - self._last_char[0] = byte_str[-1] - - if self.state == ProbingState.DETECTING: - if (self.distribution_analyzer.got_enough_data() and - (self.get_confidence() > self.SHORTCUT_THRESHOLD)): - 
self._state = ProbingState.FOUND_IT - - return self.state - - def get_confidence(self): - return self.distribution_analyzer.get_confidence() diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/mbcsgroupprober.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/mbcsgroupprober.py deleted file mode 100644 index 530abe7..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/mbcsgroupprober.py +++ /dev/null @@ -1,54 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# Proofpoint, Inc. -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetgroupprober import CharSetGroupProber -from .utf8prober import UTF8Prober -from .sjisprober import SJISProber -from .eucjpprober import EUCJPProber -from .gb2312prober import GB2312Prober -from .euckrprober import EUCKRProber -from .cp949prober import CP949Prober -from .big5prober import Big5Prober -from .euctwprober import EUCTWProber - - -class MBCSGroupProber(CharSetGroupProber): - def __init__(self, lang_filter=None): - super(MBCSGroupProber, self).__init__(lang_filter=lang_filter) - self.probers = [ - UTF8Prober(), - SJISProber(), - EUCJPProber(), - GB2312Prober(), - EUCKRProber(), - CP949Prober(), - Big5Prober(), - EUCTWProber() - ] - self.reset() diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/mbcssm.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/mbcssm.py deleted file mode 100644 index 8360d0f..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/mbcssm.py +++ /dev/null @@ -1,572 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .enums import MachineState - -# BIG5 - -BIG5_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,1, # 78 - 7f - 4,4,4,4,4,4,4,4, # 80 - 87 - 4,4,4,4,4,4,4,4, # 88 - 8f - 4,4,4,4,4,4,4,4, # 90 - 97 - 4,4,4,4,4,4,4,4, # 98 - 9f - 4,3,3,3,3,3,3,3, # a0 - a7 - 3,3,3,3,3,3,3,3, # a8 - af - 3,3,3,3,3,3,3,3, # b0 - b7 - 3,3,3,3,3,3,3,3, # b8 - bf - 3,3,3,3,3,3,3,3, # c0 - c7 - 3,3,3,3,3,3,3,3, # c8 - cf - 3,3,3,3,3,3,3,3, # d0 - d7 - 3,3,3,3,3,3,3,3, # d8 - df - 3,3,3,3,3,3,3,3, # e0 - e7 - 3,3,3,3,3,3,3,3, # e8 - ef - 3,3,3,3,3,3,3,3, # f0 - f7 - 3,3,3,3,3,3,3,0 # f8 - ff -) - -BIG5_ST = ( - MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17 -) - -BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) - -BIG5_SM_MODEL = {'class_table': BIG5_CLS, - 'class_factor': 5, - 'state_table': BIG5_ST, - 'char_len_table': BIG5_CHAR_LEN_TABLE, - 'name': 'Big5'} - -# CP949 - -CP949_CLS = ( - 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f - 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f - 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f - 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f - 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f - 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f - 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f - 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f - 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f - 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f - 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af - 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf - 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf - 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df - 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef - 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff -) - -CP949_ST = ( -#cls= 0 1 2 3 4 5 6 7 8 9 # previous state = - MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START, 4, 5,MachineState.ERROR, 6, # MachineState.START - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME - 
MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3 - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4 - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5 - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6 -) - -CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) - -CP949_SM_MODEL = {'class_table': CP949_CLS, - 'class_factor': 10, - 'state_table': CP949_ST, - 'char_len_table': CP949_CHAR_LEN_TABLE, - 'name': 'CP949'} - -# EUC-JP - -EUCJP_CLS = ( - 4,4,4,4,4,4,4,4, # 00 - 07 - 4,4,4,4,4,4,5,5, # 08 - 0f - 4,4,4,4,4,4,4,4, # 10 - 17 - 4,4,4,5,4,4,4,4, # 18 - 1f - 4,4,4,4,4,4,4,4, # 20 - 27 - 4,4,4,4,4,4,4,4, # 28 - 2f - 4,4,4,4,4,4,4,4, # 30 - 37 - 4,4,4,4,4,4,4,4, # 38 - 3f - 4,4,4,4,4,4,4,4, # 40 - 47 - 4,4,4,4,4,4,4,4, # 48 - 4f - 4,4,4,4,4,4,4,4, # 50 - 57 - 4,4,4,4,4,4,4,4, # 58 - 5f - 4,4,4,4,4,4,4,4, # 60 - 67 - 4,4,4,4,4,4,4,4, # 68 - 6f - 4,4,4,4,4,4,4,4, # 70 - 77 - 4,4,4,4,4,4,4,4, # 78 - 7f - 5,5,5,5,5,5,5,5, # 80 - 87 - 5,5,5,5,5,5,1,3, # 88 - 8f - 5,5,5,5,5,5,5,5, # 90 - 97 - 5,5,5,5,5,5,5,5, # 98 - 9f - 5,2,2,2,2,2,2,2, # a0 - a7 - 2,2,2,2,2,2,2,2, # a8 - af - 2,2,2,2,2,2,2,2, # b0 - b7 - 2,2,2,2,2,2,2,2, # b8 - bf - 2,2,2,2,2,2,2,2, # c0 - c7 - 2,2,2,2,2,2,2,2, # c8 - cf - 2,2,2,2,2,2,2,2, # d0 - d7 - 2,2,2,2,2,2,2,2, # d8 - df - 0,0,0,0,0,0,0,0, # e0 - e7 - 0,0,0,0,0,0,0,0, # e8 - ef - 0,0,0,0,0,0,0,0, # f0 - f7 - 0,0,0,0,0,0,0,5 # f8 - ff -) - -EUCJP_ST = ( - 3, 4, 3, 5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 3,MachineState.ERROR,#18-1f - 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27 -) - -EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) - -EUCJP_SM_MODEL = {'class_table': EUCJP_CLS, - 'class_factor': 6, - 'state_table': EUCJP_ST, - 'char_len_table': EUCJP_CHAR_LEN_TABLE, - 'name': 'EUC-JP'} - -# EUC-KR - -EUCKR_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 1,1,1,1,1,1,1,1, # 40 - 47 - 1,1,1,1,1,1,1,1, # 48 - 4f - 1,1,1,1,1,1,1,1, # 50 - 57 - 1,1,1,1,1,1,1,1, # 58 - 5f - 1,1,1,1,1,1,1,1, # 60 - 67 - 1,1,1,1,1,1,1,1, # 68 - 6f - 1,1,1,1,1,1,1,1, # 70 - 77 - 1,1,1,1,1,1,1,1, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,0,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,2,2,2,2,2,2,2, # a0 - a7 - 2,2,2,2,2,3,3,3, # 
a8 - af - 2,2,2,2,2,2,2,2, # b0 - b7 - 2,2,2,2,2,2,2,2, # b8 - bf - 2,2,2,2,2,2,2,2, # c0 - c7 - 2,3,2,2,2,2,2,2, # c8 - cf - 2,2,2,2,2,2,2,2, # d0 - d7 - 2,2,2,2,2,2,2,2, # d8 - df - 2,2,2,2,2,2,2,2, # e0 - e7 - 2,2,2,2,2,2,2,2, # e8 - ef - 2,2,2,2,2,2,2,2, # f0 - f7 - 2,2,2,2,2,2,2,0 # f8 - ff -) - -EUCKR_ST = ( - MachineState.ERROR,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f -) - -EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) - -EUCKR_SM_MODEL = {'class_table': EUCKR_CLS, - 'class_factor': 4, - 'state_table': EUCKR_ST, - 'char_len_table': EUCKR_CHAR_LEN_TABLE, - 'name': 'EUC-KR'} - -# EUC-TW - -EUCTW_CLS = ( - 2,2,2,2,2,2,2,2, # 00 - 07 - 2,2,2,2,2,2,0,0, # 08 - 0f - 2,2,2,2,2,2,2,2, # 10 - 17 - 2,2,2,0,2,2,2,2, # 18 - 1f - 2,2,2,2,2,2,2,2, # 20 - 27 - 2,2,2,2,2,2,2,2, # 28 - 2f - 2,2,2,2,2,2,2,2, # 30 - 37 - 2,2,2,2,2,2,2,2, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,2, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,6,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,3,4,4,4,4,4,4, # a0 - a7 - 5,5,1,1,1,1,1,1, # a8 - af - 1,1,1,1,1,1,1,1, # b0 - b7 - 1,1,1,1,1,1,1,1, # b8 - bf - 1,1,3,1,3,3,3,3, # c0 - c7 - 3,3,3,3,3,3,3,3, # c8 - cf - 3,3,3,3,3,3,3,3, # d0 - d7 - 3,3,3,3,3,3,3,3, # d8 - df - 3,3,3,3,3,3,3,3, # e0 - e7 - 3,3,3,3,3,3,3,3, # e8 - ef - 3,3,3,3,3,3,3,3, # f0 - f7 - 3,3,3,3,3,3,3,0 # f8 - ff -) - -EUCTW_ST = ( - MachineState.ERROR,MachineState.ERROR,MachineState.START, 3, 3, 3, 4,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17 - MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f - 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27 - MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f -) - -EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) - -EUCTW_SM_MODEL = {'class_table': EUCTW_CLS, - 'class_factor': 7, - 'state_table': EUCTW_ST, - 'char_len_table': EUCTW_CHAR_LEN_TABLE, - 'name': 'x-euc-tw'} - -# GB2312 - -GB2312_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 3,3,3,3,3,3,3,3, # 30 - 37 - 3,3,1,1,1,1,1,1, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,4, # 78 - 7f - 5,6,6,6,6,6,6,6, # 80 - 87 - 6,6,6,6,6,6,6,6, # 88 - 8f - 6,6,6,6,6,6,6,6, # 90 - 97 - 6,6,6,6,6,6,6,6, # 98 - 9f - 6,6,6,6,6,6,6,6, # a0 - a7 - 
6,6,6,6,6,6,6,6, # a8 - af - 6,6,6,6,6,6,6,6, # b0 - b7 - 6,6,6,6,6,6,6,6, # b8 - bf - 6,6,6,6,6,6,6,6, # c0 - c7 - 6,6,6,6,6,6,6,6, # c8 - cf - 6,6,6,6,6,6,6,6, # d0 - d7 - 6,6,6,6,6,6,6,6, # d8 - df - 6,6,6,6,6,6,6,6, # e0 - e7 - 6,6,6,6,6,6,6,6, # e8 - ef - 6,6,6,6,6,6,6,6, # f0 - f7 - 6,6,6,6,6,6,6,0 # f8 - ff -) - -GB2312_ST = ( - MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, 3,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17 - 4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f - MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27 - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f -) - -# To be accurate, the length of class 6 can be either 2 or 4. -# But it is not necessary to discriminate between the two since -# it is used for frequency analysis only, and we are validating -# each code range there as well. So it is safe to set it to be -# 2 here. -GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) - -GB2312_SM_MODEL = {'class_table': GB2312_CLS, - 'class_factor': 7, - 'state_table': GB2312_ST, - 'char_len_table': GB2312_CHAR_LEN_TABLE, - 'name': 'GB2312'} - -# Shift_JIS - -SJIS_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 2,2,2,2,2,2,2,2, # 40 - 47 - 2,2,2,2,2,2,2,2, # 48 - 4f - 2,2,2,2,2,2,2,2, # 50 - 57 - 2,2,2,2,2,2,2,2, # 58 - 5f - 2,2,2,2,2,2,2,2, # 60 - 67 - 2,2,2,2,2,2,2,2, # 68 - 6f - 2,2,2,2,2,2,2,2, # 70 - 77 - 2,2,2,2,2,2,2,1, # 78 - 7f - 3,3,3,3,3,2,2,3, # 80 - 87 - 3,3,3,3,3,3,3,3, # 88 - 8f - 3,3,3,3,3,3,3,3, # 90 - 97 - 3,3,3,3,3,3,3,3, # 98 - 9f - #0xa0 is illegal in sjis encoding, but some pages does - #contain such byte. We need to be more error forgiven. 
- 2,2,2,2,2,2,2,2, # a0 - a7 - 2,2,2,2,2,2,2,2, # a8 - af - 2,2,2,2,2,2,2,2, # b0 - b7 - 2,2,2,2,2,2,2,2, # b8 - bf - 2,2,2,2,2,2,2,2, # c0 - c7 - 2,2,2,2,2,2,2,2, # c8 - cf - 2,2,2,2,2,2,2,2, # d0 - d7 - 2,2,2,2,2,2,2,2, # d8 - df - 3,3,3,3,3,3,3,3, # e0 - e7 - 3,3,3,3,3,4,4,4, # e8 - ef - 3,3,3,3,3,3,3,3, # f0 - f7 - 3,3,3,3,3,0,0,0) # f8 - ff - - -SJIS_ST = ( - MachineState.ERROR,MachineState.START,MachineState.START, 3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17 -) - -SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) - -SJIS_SM_MODEL = {'class_table': SJIS_CLS, - 'class_factor': 6, - 'state_table': SJIS_ST, - 'char_len_table': SJIS_CHAR_LEN_TABLE, - 'name': 'Shift_JIS'} - -# UCS2-BE - -UCS2BE_CLS = ( - 0,0,0,0,0,0,0,0, # 00 - 07 - 0,0,1,0,0,2,0,0, # 08 - 0f - 0,0,0,0,0,0,0,0, # 10 - 17 - 0,0,0,3,0,0,0,0, # 18 - 1f - 0,0,0,0,0,0,0,0, # 20 - 27 - 0,3,3,3,3,3,0,0, # 28 - 2f - 0,0,0,0,0,0,0,0, # 30 - 37 - 0,0,0,0,0,0,0,0, # 38 - 3f - 0,0,0,0,0,0,0,0, # 40 - 47 - 0,0,0,0,0,0,0,0, # 48 - 4f - 0,0,0,0,0,0,0,0, # 50 - 57 - 0,0,0,0,0,0,0,0, # 58 - 5f - 0,0,0,0,0,0,0,0, # 60 - 67 - 0,0,0,0,0,0,0,0, # 68 - 6f - 0,0,0,0,0,0,0,0, # 70 - 77 - 0,0,0,0,0,0,0,0, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,0,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,0,0,0,0,0,0,0, # a0 - a7 - 0,0,0,0,0,0,0,0, # a8 - af - 0,0,0,0,0,0,0,0, # b0 - b7 - 0,0,0,0,0,0,0,0, # b8 - bf - 0,0,0,0,0,0,0,0, # c0 - c7 - 0,0,0,0,0,0,0,0, # c8 - cf - 0,0,0,0,0,0,0,0, # d0 - d7 - 0,0,0,0,0,0,0,0, # d8 - df - 0,0,0,0,0,0,0,0, # e0 - e7 - 0,0,0,0,0,0,0,0, # e8 - ef - 0,0,0,0,0,0,0,0, # f0 - f7 - 0,0,0,0,0,0,4,5 # f8 - ff -) - -UCS2BE_ST = ( - 5, 7, 7,MachineState.ERROR, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME, 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,#10-17 - 6, 6, 6, 6, 6,MachineState.ITS_ME, 6, 6,#18-1f - 6, 6, 6, 6, 5, 7, 7,MachineState.ERROR,#20-27 - 5, 8, 6, 6,MachineState.ERROR, 6, 6, 6,#28-2f - 6, 6, 6, 6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37 -) - -UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) - -UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS, - 'class_factor': 6, - 'state_table': UCS2BE_ST, - 'char_len_table': UCS2BE_CHAR_LEN_TABLE, - 'name': 'UTF-16BE'} - -# UCS2-LE - -UCS2LE_CLS = ( - 0,0,0,0,0,0,0,0, # 00 - 07 - 0,0,1,0,0,2,0,0, # 08 - 0f - 0,0,0,0,0,0,0,0, # 10 - 17 - 0,0,0,3,0,0,0,0, # 18 - 1f - 0,0,0,0,0,0,0,0, # 20 - 27 - 0,3,3,3,3,3,0,0, # 28 - 2f - 0,0,0,0,0,0,0,0, # 30 - 37 - 0,0,0,0,0,0,0,0, # 38 - 3f - 0,0,0,0,0,0,0,0, # 40 - 47 - 0,0,0,0,0,0,0,0, # 48 - 4f - 0,0,0,0,0,0,0,0, # 50 - 57 - 0,0,0,0,0,0,0,0, # 58 - 5f - 0,0,0,0,0,0,0,0, # 60 - 67 - 0,0,0,0,0,0,0,0, # 68 - 6f - 0,0,0,0,0,0,0,0, # 70 - 77 - 0,0,0,0,0,0,0,0, # 78 - 7f - 0,0,0,0,0,0,0,0, # 80 - 87 - 0,0,0,0,0,0,0,0, # 88 - 8f - 0,0,0,0,0,0,0,0, # 90 - 97 - 0,0,0,0,0,0,0,0, # 98 - 9f - 0,0,0,0,0,0,0,0, # a0 - a7 - 0,0,0,0,0,0,0,0, # a8 - af - 0,0,0,0,0,0,0,0, # b0 - b7 - 0,0,0,0,0,0,0,0, # b8 - bf - 
0,0,0,0,0,0,0,0, # c0 - c7 - 0,0,0,0,0,0,0,0, # c8 - cf - 0,0,0,0,0,0,0,0, # d0 - d7 - 0,0,0,0,0,0,0,0, # d8 - df - 0,0,0,0,0,0,0,0, # e0 - e7 - 0,0,0,0,0,0,0,0, # e8 - ef - 0,0,0,0,0,0,0,0, # f0 - f7 - 0,0,0,0,0,0,4,5 # f8 - ff -) - -UCS2LE_ST = ( - 6, 6, 7, 6, 4, 3,MachineState.ERROR,MachineState.ERROR,#00-07 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f - MachineState.ITS_ME,MachineState.ITS_ME, 5, 5, 5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17 - 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR, 6, 6,#18-1f - 7, 6, 8, 8, 5, 5, 5,MachineState.ERROR,#20-27 - 5, 5, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5,#28-2f - 5, 5, 5,MachineState.ERROR, 5,MachineState.ERROR,MachineState.START,MachineState.START #30-37 -) - -UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) - -UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS, - 'class_factor': 6, - 'state_table': UCS2LE_ST, - 'char_len_table': UCS2LE_CHAR_LEN_TABLE, - 'name': 'UTF-16LE'} - -# UTF-8 - -UTF8_CLS = ( - 1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value - 1,1,1,1,1,1,0,0, # 08 - 0f - 1,1,1,1,1,1,1,1, # 10 - 17 - 1,1,1,0,1,1,1,1, # 18 - 1f - 1,1,1,1,1,1,1,1, # 20 - 27 - 1,1,1,1,1,1,1,1, # 28 - 2f - 1,1,1,1,1,1,1,1, # 30 - 37 - 1,1,1,1,1,1,1,1, # 38 - 3f - 1,1,1,1,1,1,1,1, # 40 - 47 - 1,1,1,1,1,1,1,1, # 48 - 4f - 1,1,1,1,1,1,1,1, # 50 - 57 - 1,1,1,1,1,1,1,1, # 58 - 5f - 1,1,1,1,1,1,1,1, # 60 - 67 - 1,1,1,1,1,1,1,1, # 68 - 6f - 1,1,1,1,1,1,1,1, # 70 - 77 - 1,1,1,1,1,1,1,1, # 78 - 7f - 2,2,2,2,3,3,3,3, # 80 - 87 - 4,4,4,4,4,4,4,4, # 88 - 8f - 4,4,4,4,4,4,4,4, # 90 - 97 - 4,4,4,4,4,4,4,4, # 98 - 9f - 5,5,5,5,5,5,5,5, # a0 - a7 - 5,5,5,5,5,5,5,5, # a8 - af - 5,5,5,5,5,5,5,5, # b0 - b7 - 5,5,5,5,5,5,5,5, # b8 - bf - 0,0,6,6,6,6,6,6, # c0 - c7 - 6,6,6,6,6,6,6,6, # c8 - cf - 6,6,6,6,6,6,6,6, # d0 - d7 - 6,6,6,6,6,6,6,6, # d8 - df - 7,8,8,8,8,8,8,8, # e0 - e7 - 8,8,8,8,8,9,8,8, # e8 - ef - 10,11,11,11,11,11,11,11, # f0 - f7 - 12,13,13,13,14,15,0,0 # f8 - ff -) - -UTF8_ST = ( - MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12, 10,#00-07 - 9, 11, 8, 7, 6, 5, 4, 3,#08-0f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27 - MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f - MachineState.ERROR,MachineState.ERROR, 5, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#30-37 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5, 5, 5,MachineState.ERROR,MachineState.ERROR,#40-47 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f - MachineState.ERROR,MachineState.ERROR, 7, 7, 7, 
7,MachineState.ERROR,MachineState.ERROR,#50-57 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 7, 7,MachineState.ERROR,MachineState.ERROR,#60-67 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f - MachineState.ERROR,MachineState.ERROR, 9, 9, 9, 9,MachineState.ERROR,MachineState.ERROR,#70-77 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 9,MachineState.ERROR,MachineState.ERROR,#80-87 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f - MachineState.ERROR,MachineState.ERROR, 12, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,#90-97 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 12,MachineState.ERROR,MachineState.ERROR,#a0-a7 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af - MachineState.ERROR,MachineState.ERROR, 12, 12, 12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf - MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7 - MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf -) - -UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) - -UTF8_SM_MODEL = {'class_table': UTF8_CLS, - 'class_factor': 16, - 'state_table': UTF8_ST, - 'char_len_table': UTF8_CHAR_LEN_TABLE, - 'name': 'UTF-8'} diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/sbcharsetprober.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/sbcharsetprober.py deleted file mode 100644 index 0adb51d..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/sbcharsetprober.py +++ /dev/null @@ -1,132 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import CharacterCategory, ProbingState, SequenceLikelihood - - -class SingleByteCharSetProber(CharSetProber): - SAMPLE_SIZE = 64 - SB_ENOUGH_REL_THRESHOLD = 1024 # 0.25 * SAMPLE_SIZE^2 - POSITIVE_SHORTCUT_THRESHOLD = 0.95 - NEGATIVE_SHORTCUT_THRESHOLD = 0.05 - - def __init__(self, model, reversed=False, name_prober=None): - super(SingleByteCharSetProber, self).__init__() - self._model = model - # TRUE if we need to reverse every pair in the model lookup - self._reversed = reversed - # Optional auxiliary prober for name decision - self._name_prober = name_prober - self._last_order = None - self._seq_counters = None - self._total_seqs = None - self._total_char = None - self._freq_char = None - self.reset() - - def reset(self): - super(SingleByteCharSetProber, self).reset() - # char order of last character - self._last_order = 255 - self._seq_counters = [0] * SequenceLikelihood.get_num_categories() - self._total_seqs = 0 - self._total_char = 0 - # characters that fall in our sampling range - self._freq_char = 0 - - @property - def charset_name(self): - if self._name_prober: - return self._name_prober.charset_name - else: - return self._model['charset_name'] - - @property - def language(self): - if self._name_prober: - return self._name_prober.language - else: - return self._model.get('language') - - def feed(self, byte_str): - if not self._model['keep_english_letter']: - byte_str = self.filter_international_words(byte_str) - if not byte_str: - return self.state - char_to_order_map = self._model['char_to_order_map'] - for i, c in enumerate(byte_str): - # XXX: Order is in range 1-64, so one would think we want 0-63 here, - # but that leads to 27 more test failures than before. - order = char_to_order_map[c] - # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but - # CharacterCategory.SYMBOL is actually 253, so we use CONTROL - # to make it closer to the original intent. The only difference - # is whether or not we count digits and control characters for - # _total_char purposes. 
- if order < CharacterCategory.CONTROL: - self._total_char += 1 - if order < self.SAMPLE_SIZE: - self._freq_char += 1 - if self._last_order < self.SAMPLE_SIZE: - self._total_seqs += 1 - if not self._reversed: - i = (self._last_order * self.SAMPLE_SIZE) + order - model = self._model['precedence_matrix'][i] - else: # reverse the order of the letters in the lookup - i = (order * self.SAMPLE_SIZE) + self._last_order - model = self._model['precedence_matrix'][i] - self._seq_counters[model] += 1 - self._last_order = order - - charset_name = self._model['charset_name'] - if self.state == ProbingState.DETECTING: - if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD: - confidence = self.get_confidence() - if confidence > self.POSITIVE_SHORTCUT_THRESHOLD: - self.logger.debug('%s confidence = %s, we have a winner', - charset_name, confidence) - self._state = ProbingState.FOUND_IT - elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD: - self.logger.debug('%s confidence = %s, below negative ' - 'shortcut threshhold %s', charset_name, - confidence, - self.NEGATIVE_SHORTCUT_THRESHOLD) - self._state = ProbingState.NOT_ME - - return self.state - - def get_confidence(self): - r = 0.01 - if self._total_seqs > 0: - r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) / - self._total_seqs / self._model['typical_positive_ratio']) - r = r * self._freq_char / self._total_char - if r >= 1.0: - r = 0.99 - return r diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/sbcsgroupprober.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/sbcsgroupprober.py deleted file mode 100644 index 98e95dc..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/sbcsgroupprober.py +++ /dev/null @@ -1,73 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetgroupprober import CharSetGroupProber -from .sbcharsetprober import SingleByteCharSetProber -from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel, - Latin5CyrillicModel, MacCyrillicModel, - Ibm866Model, Ibm855Model) -from .langgreekmodel import Latin7GreekModel, Win1253GreekModel -from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel -# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel -from .langthaimodel import TIS620ThaiModel -from .langhebrewmodel import Win1255HebrewModel -from .hebrewprober import HebrewProber -from .langturkishmodel import Latin5TurkishModel - - -class SBCSGroupProber(CharSetGroupProber): - def __init__(self): - super(SBCSGroupProber, self).__init__() - self.probers = [ - SingleByteCharSetProber(Win1251CyrillicModel), - SingleByteCharSetProber(Koi8rModel), - SingleByteCharSetProber(Latin5CyrillicModel), - SingleByteCharSetProber(MacCyrillicModel), - SingleByteCharSetProber(Ibm866Model), - SingleByteCharSetProber(Ibm855Model), - SingleByteCharSetProber(Latin7GreekModel), - SingleByteCharSetProber(Win1253GreekModel), - SingleByteCharSetProber(Latin5BulgarianModel), - SingleByteCharSetProber(Win1251BulgarianModel), - # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250) - # after we retrain model. - # SingleByteCharSetProber(Latin2HungarianModel), - # SingleByteCharSetProber(Win1250HungarianModel), - SingleByteCharSetProber(TIS620ThaiModel), - SingleByteCharSetProber(Latin5TurkishModel), - ] - hebrew_prober = HebrewProber() - logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, - False, hebrew_prober) - visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True, - hebrew_prober) - hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober) - self.probers.extend([hebrew_prober, logical_hebrew_prober, - visual_hebrew_prober]) - - self.reset() diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/sjisprober.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/sjisprober.py deleted file mode 100644 index 9e29623..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/sjisprober.py +++ /dev/null @@ -1,92 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .mbcharsetprober import MultiByteCharSetProber -from .codingstatemachine import CodingStateMachine -from .chardistribution import SJISDistributionAnalysis -from .jpcntx import SJISContextAnalysis -from .mbcssm import SJIS_SM_MODEL -from .enums import ProbingState, MachineState - - -class SJISProber(MultiByteCharSetProber): - def __init__(self): - super(SJISProber, self).__init__() - self.coding_sm = CodingStateMachine(SJIS_SM_MODEL) - self.distribution_analyzer = SJISDistributionAnalysis() - self.context_analyzer = SJISContextAnalysis() - self.reset() - - def reset(self): - super(SJISProber, self).reset() - self.context_analyzer.reset() - - @property - def charset_name(self): - return self.context_analyzer.charset_name - - @property - def language(self): - return "Japanese" - - def feed(self, byte_str): - for i in range(len(byte_str)): - coding_state = self.coding_sm.next_state(byte_str[i]) - if coding_state == MachineState.ERROR: - self.logger.debug('%s %s prober hit error at byte %s', - self.charset_name, self.language, i) - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - char_len = self.coding_sm.get_current_charlen() - if i == 0: - self._last_char[1] = byte_str[0] - self.context_analyzer.feed(self._last_char[2 - char_len:], - char_len) - self.distribution_analyzer.feed(self._last_char, char_len) - else: - self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3 - - char_len], char_len) - self.distribution_analyzer.feed(byte_str[i - 1:i + 1], - char_len) - - self._last_char[0] = byte_str[-1] - - if self.state == ProbingState.DETECTING: - if (self.context_analyzer.got_enough_data() and - (self.get_confidence() > self.SHORTCUT_THRESHOLD)): - self._state = ProbingState.FOUND_IT - - return self.state - - def get_confidence(self): - context_conf = self.context_analyzer.get_confidence() - distrib_conf = self.distribution_analyzer.get_confidence() - return max(context_conf, distrib_conf) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/universaldetector.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/universaldetector.py deleted file mode 100644 index 7b4e92d..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/universaldetector.py +++ /dev/null @@ -1,286 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is Mozilla Universal charset detector code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 2001 -# the Initial Developer. All Rights Reserved. -# -# Contributor(s): -# Mark Pilgrim - port to Python -# Shy Shalom - original C code -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### -""" -Module containing the UniversalDetector detector class, which is the primary -class a user of ``chardet`` should use. - -:author: Mark Pilgrim (initial port to Python) -:author: Shy Shalom (original C code) -:author: Dan Blanchard (major refactoring for 3.0) -:author: Ian Cordasco -""" - - -import codecs -import logging -import re - -from .charsetgroupprober import CharSetGroupProber -from .enums import InputState, LanguageFilter, ProbingState -from .escprober import EscCharSetProber -from .latin1prober import Latin1Prober -from .mbcsgroupprober import MBCSGroupProber -from .sbcsgroupprober import SBCSGroupProber - - -class UniversalDetector(object): - """ - The ``UniversalDetector`` class underlies the ``chardet.detect`` function - and coordinates all of the different charset probers. - - To get a ``dict`` containing an encoding and its confidence, you can simply - run: - - .. code:: - - u = UniversalDetector() - u.feed(some_bytes) - u.close() - detected = u.result - - """ - - MINIMUM_THRESHOLD = 0.20 - HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]') - ESC_DETECTOR = re.compile(b'(\033|~{)') - WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]') - ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252', - 'iso-8859-2': 'Windows-1250', - 'iso-8859-5': 'Windows-1251', - 'iso-8859-6': 'Windows-1256', - 'iso-8859-7': 'Windows-1253', - 'iso-8859-8': 'Windows-1255', - 'iso-8859-9': 'Windows-1254', - 'iso-8859-13': 'Windows-1257'} - - def __init__(self, lang_filter=LanguageFilter.ALL): - self._esc_charset_prober = None - self._charset_probers = [] - self.result = None - self.done = None - self._got_data = None - self._input_state = None - self._last_char = None - self.lang_filter = lang_filter - self.logger = logging.getLogger(__name__) - self._has_win_bytes = None - self.reset() - - def reset(self): - """ - Reset the UniversalDetector and all of its probers back to their - initial states. This is called by ``__init__``, so you only need to - call this directly in between analyses of different documents. - """ - self.result = {'encoding': None, 'confidence': 0.0, 'language': None} - self.done = False - self._got_data = False - self._has_win_bytes = False - self._input_state = InputState.PURE_ASCII - self._last_char = b'' - if self._esc_charset_prober: - self._esc_charset_prober.reset() - for prober in self._charset_probers: - prober.reset() - - def feed(self, byte_str): - """ - Takes a chunk of a document and feeds it through all of the relevant - charset probers. - - After calling ``feed``, you can check the value of the ``done`` - attribute to see if you need to continue feeding the - ``UniversalDetector`` more data, or if it has made a prediction - (in the ``result`` attribute). - - .. note:: - You should always call ``close`` when you're done feeding in your - document if ``done`` is not already ``True``. 
- """ - if self.done: - return - - if not len(byte_str): - return - - if not isinstance(byte_str, bytearray): - byte_str = bytearray(byte_str) - - # First check for known BOMs, since these are guaranteed to be correct - if not self._got_data: - # If the data starts with BOM, we know it is UTF - if byte_str.startswith(codecs.BOM_UTF8): - # EF BB BF UTF-8 with BOM - self.result = {'encoding': "UTF-8-SIG", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith((codecs.BOM_UTF32_LE, - codecs.BOM_UTF32_BE)): - # FF FE 00 00 UTF-32, little-endian BOM - # 00 00 FE FF UTF-32, big-endian BOM - self.result = {'encoding': "UTF-32", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith(b'\xFE\xFF\x00\x00'): - # FE FF 00 00 UCS-4, unusual octet order BOM (3412) - self.result = {'encoding': "X-ISO-10646-UCS-4-3412", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith(b'\x00\x00\xFF\xFE'): - # 00 00 FF FE UCS-4, unusual octet order BOM (2143) - self.result = {'encoding': "X-ISO-10646-UCS-4-2143", - 'confidence': 1.0, - 'language': ''} - elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): - # FF FE UTF-16, little endian BOM - # FE FF UTF-16, big endian BOM - self.result = {'encoding': "UTF-16", - 'confidence': 1.0, - 'language': ''} - - self._got_data = True - if self.result['encoding'] is not None: - self.done = True - return - - # If none of those matched and we've only see ASCII so far, check - # for high bytes and escape sequences - if self._input_state == InputState.PURE_ASCII: - if self.HIGH_BYTE_DETECTOR.search(byte_str): - self._input_state = InputState.HIGH_BYTE - elif self._input_state == InputState.PURE_ASCII and \ - self.ESC_DETECTOR.search(self._last_char + byte_str): - self._input_state = InputState.ESC_ASCII - - self._last_char = byte_str[-1:] - - # If we've seen escape sequences, use the EscCharSetProber, which - # uses a simple state machine to check for known escape sequences in - # HZ and ISO-2022 encodings, since those are the only encodings that - # use such sequences. - if self._input_state == InputState.ESC_ASCII: - if not self._esc_charset_prober: - self._esc_charset_prober = EscCharSetProber(self.lang_filter) - if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: - self.result = {'encoding': - self._esc_charset_prober.charset_name, - 'confidence': - self._esc_charset_prober.get_confidence(), - 'language': - self._esc_charset_prober.language} - self.done = True - # If we've seen high bytes (i.e., those with values greater than 127), - # we need to do more complicated checks using all our multi-byte and - # single-byte probers that are left. The single-byte probers - # use character bigram distributions to determine the encoding, whereas - # the multi-byte probers use a combination of character unigram and - # bigram distributions. 
- elif self._input_state == InputState.HIGH_BYTE: - if not self._charset_probers: - self._charset_probers = [MBCSGroupProber(self.lang_filter)] - # If we're checking non-CJK encodings, use single-byte prober - if self.lang_filter & LanguageFilter.NON_CJK: - self._charset_probers.append(SBCSGroupProber()) - self._charset_probers.append(Latin1Prober()) - for prober in self._charset_probers: - if prober.feed(byte_str) == ProbingState.FOUND_IT: - self.result = {'encoding': prober.charset_name, - 'confidence': prober.get_confidence(), - 'language': prober.language} - self.done = True - break - if self.WIN_BYTE_DETECTOR.search(byte_str): - self._has_win_bytes = True - - def close(self): - """ - Stop analyzing the current document and come up with a final - prediction. - - :returns: The ``result`` attribute, a ``dict`` with the keys - `encoding`, `confidence`, and `language`. - """ - # Don't bother with checks if we're already done - if self.done: - return self.result - self.done = True - - if not self._got_data: - self.logger.debug('no data received!') - - # Default to ASCII if it is all we've seen so far - elif self._input_state == InputState.PURE_ASCII: - self.result = {'encoding': 'ascii', - 'confidence': 1.0, - 'language': ''} - - # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD - elif self._input_state == InputState.HIGH_BYTE: - prober_confidence = None - max_prober_confidence = 0.0 - max_prober = None - for prober in self._charset_probers: - if not prober: - continue - prober_confidence = prober.get_confidence() - if prober_confidence > max_prober_confidence: - max_prober_confidence = prober_confidence - max_prober = prober - if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): - charset_name = max_prober.charset_name - lower_charset_name = max_prober.charset_name.lower() - confidence = max_prober.get_confidence() - # Use Windows encoding name instead of ISO-8859 if we saw any - # extra Windows-specific bytes - if lower_charset_name.startswith('iso-8859'): - if self._has_win_bytes: - charset_name = self.ISO_WIN_MAP.get(lower_charset_name, - charset_name) - self.result = {'encoding': charset_name, - 'confidence': confidence, - 'language': max_prober.language} - - # Log all prober confidences if none met MINIMUM_THRESHOLD - if self.logger.getEffectiveLevel() == logging.DEBUG: - if self.result['encoding'] is None: - self.logger.debug('no probers hit minimum threshold') - for group_prober in self._charset_probers: - if not group_prober: - continue - if isinstance(group_prober, CharSetGroupProber): - for prober in group_prober.probers: - self.logger.debug('%s %s confidence = %s', - prober.charset_name, - prober.language, - prober.get_confidence()) - else: - self.logger.debug('%s %s confidence = %s', - prober.charset_name, - prober.language, - prober.get_confidence()) - return self.result diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/utf8prober.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/utf8prober.py deleted file mode 100644 index 6c3196c..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/utf8prober.py +++ /dev/null @@ -1,82 +0,0 @@ -######################## BEGIN LICENSE BLOCK ######################## -# The Original Code is mozilla.org code. -# -# The Initial Developer of the Original Code is -# Netscape Communications Corporation. -# Portions created by the Initial Developer are Copyright (C) 1998 -# the Initial Developer. All Rights Reserved. 
-# -# Contributor(s): -# Mark Pilgrim - port to Python -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA -# 02110-1301 USA -######################### END LICENSE BLOCK ######################### - -from .charsetprober import CharSetProber -from .enums import ProbingState, MachineState -from .codingstatemachine import CodingStateMachine -from .mbcssm import UTF8_SM_MODEL - - - -class UTF8Prober(CharSetProber): - ONE_CHAR_PROB = 0.5 - - def __init__(self): - super(UTF8Prober, self).__init__() - self.coding_sm = CodingStateMachine(UTF8_SM_MODEL) - self._num_mb_chars = None - self.reset() - - def reset(self): - super(UTF8Prober, self).reset() - self.coding_sm.reset() - self._num_mb_chars = 0 - - @property - def charset_name(self): - return "utf-8" - - @property - def language(self): - return "" - - def feed(self, byte_str): - for c in byte_str: - coding_state = self.coding_sm.next_state(c) - if coding_state == MachineState.ERROR: - self._state = ProbingState.NOT_ME - break - elif coding_state == MachineState.ITS_ME: - self._state = ProbingState.FOUND_IT - break - elif coding_state == MachineState.START: - if self.coding_sm.get_current_charlen() >= 2: - self._num_mb_chars += 1 - - if self.state == ProbingState.DETECTING: - if self.get_confidence() > self.SHORTCUT_THRESHOLD: - self._state = ProbingState.FOUND_IT - - return self.state - - def get_confidence(self): - unlike = 0.99 - if self._num_mb_chars < 6: - unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars - return 1.0 - unlike - else: - return unlike diff --git a/.tox/py37-normal/lib/python3.7/site-packages/chardet/version.py b/.tox/py37-normal/lib/python3.7/site-packages/chardet/version.py deleted file mode 100644 index bb2a34a..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/chardet/version.py +++ /dev/null @@ -1,9 +0,0 @@ -""" -This module exists only to simplify retrieving the version number of chardet -from within setup.py and from chardet subpackages. 
- -:author: Dan Blanchard (dan.blanchard@gmail.com) -""" - -__version__ = "3.0.4" -VERSION = __version__.split('.') diff --git a/.tox/py37-normal/lib/python3.7/site-packages/easy_install.py b/.tox/py37-normal/lib/python3.7/site-packages/easy_install.py deleted file mode 100644 index d87e984..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/easy_install.py +++ /dev/null @@ -1,5 +0,0 @@ -"""Run the EasyInstall command""" - -if __name__ == '__main__': - from setuptools.command.easy_install import main - main() diff --git a/.tox/py37-normal/lib/python3.7/site-packages/idna-2.8.dist-info/INSTALLER b/.tox/py37-normal/lib/python3.7/site-packages/idna-2.8.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/idna-2.8.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/.tox/py37-normal/lib/python3.7/site-packages/idna-2.8.dist-info/LICENSE.rst b/.tox/py37-normal/lib/python3.7/site-packages/idna-2.8.dist-info/LICENSE.rst deleted file mode 100644 index 3ee64fb..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/idna-2.8.dist-info/LICENSE.rst +++ /dev/null @@ -1,80 +0,0 @@ -License -------- - -Copyright (c) 2013-2018, Kim Davies. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -#. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -#. Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided with - the distribution. - -#. Neither the name of the copyright holder nor the names of the - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -#. THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS "AS IS" AND ANY - EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR - PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR - CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE - USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH - DAMAGE. - -Portions of the codec implementation and unit tests are derived from the -Python standard library, which carries the `Python Software Foundation -License `_: - - Copyright (c) 2001-2014 Python Software Foundation; All Rights Reserved - -Portions of the unit tests are derived from the Unicode standard, which -is subject to the Unicode, Inc. License Agreement: - - Copyright (c) 1991-2014 Unicode, Inc. All rights reserved. - Distributed under the Terms of Use in - . 
- - Permission is hereby granted, free of charge, to any person obtaining - a copy of the Unicode data files and any associated documentation - (the "Data Files") or Unicode software and any associated documentation - (the "Software") to deal in the Data Files or Software - without restriction, including without limitation the rights to use, - copy, modify, merge, publish, distribute, and/or sell copies of - the Data Files or Software, and to permit persons to whom the Data Files - or Software are furnished to do so, provided that - - (a) this copyright and permission notice appear with all copies - of the Data Files or Software, - - (b) this copyright and permission notice appear in associated - documentation, and - - (c) there is clear notice in each modified Data File or in the Software - as well as in the documentation associated with the Data File(s) or - Software that the data or software has been modified. - - THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF - ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE - WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - NONINFRINGEMENT OF THIRD PARTY RIGHTS. - IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS - NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL - DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, - DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER - TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR - PERFORMANCE OF THE DATA FILES OR SOFTWARE. - - Except as contained in this notice, the name of a copyright holder - shall not be used in advertising or otherwise to promote the sale, - use or other dealings in these Data Files or Software without prior - written authorization of the copyright holder. diff --git a/.tox/py37-normal/lib/python3.7/site-packages/idna-2.8.dist-info/METADATA b/.tox/py37-normal/lib/python3.7/site-packages/idna-2.8.dist-info/METADATA deleted file mode 100644 index 30fde02..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/idna-2.8.dist-info/METADATA +++ /dev/null @@ -1,239 +0,0 @@ -Metadata-Version: 2.1 -Name: idna -Version: 2.8 -Summary: Internationalized Domain Names in Applications (IDNA) -Home-page: https://github.com/kjd/idna -Author: Kim Davies -Author-email: kim@cynosure.com.au -License: BSD-like -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: System Administrators -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Topic :: Internet :: Name Service (DNS) -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Utilities -Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* - -Internationalized Domain Names in Applications (IDNA) -===================================================== - -Support for the Internationalised Domain Names in Applications -(IDNA) protocol as specified in `RFC 5891 `_. -This is the latest version of the protocol and is sometimes referred to as -“IDNA 2008”. 
- -This library also provides support for Unicode Technical Standard 46, -`Unicode IDNA Compatibility Processing `_. - -This acts as a suitable replacement for the “encodings.idna” module that -comes with the Python standard library, but only supports the -old, deprecated IDNA specification (`RFC 3490 `_). - -Basic functions are simply executed: - -.. code-block:: pycon - - # Python 3 - >>> import idna - >>> idna.encode('ドメイン.テスト') - b'xn--eckwd4c7c.xn--zckzah' - >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) - ドメイン.テスト - - # Python 2 - >>> import idna - >>> idna.encode(u'ドメイン.テスト') - 'xn--eckwd4c7c.xn--zckzah' - >>> print idna.decode('xn--eckwd4c7c.xn--zckzah') - ドメイン.テスト - -Packages --------- - -The latest tagged release version is published in the PyPI repository: - -.. image:: https://badge.fury.io/py/idna.svg - :target: http://badge.fury.io/py/idna - - -Installation ------------- - -To install this library, you can use pip: - -.. code-block:: bash - - $ pip install idna - -Alternatively, you can install the package using the bundled setup script: - -.. code-block:: bash - - $ python setup.py install - -This library works with Python 2.7 and Python 3.4 or later. - - -Usage ------ - -For typical usage, the ``encode`` and ``decode`` functions will take a domain -name argument and perform a conversion to A-labels or U-labels respectively. - -.. code-block:: pycon - - # Python 3 - >>> import idna - >>> idna.encode('ドメイン.テスト') - b'xn--eckwd4c7c.xn--zckzah' - >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) - ドメイン.テスト - -You may use the codec encoding and decoding methods using the -``idna.codec`` module: - -.. code-block:: pycon - - # Python 2 - >>> import idna.codec - >>> print u'домена.испытание'.encode('idna') - xn--80ahd1agd.xn--80akhbyknj4f - >>> print 'xn--80ahd1agd.xn--80akhbyknj4f'.decode('idna') - домена.испытание - -Conversions can be applied at a per-label basis using the ``ulabel`` or ``alabel`` -functions if necessary: - -.. code-block:: pycon - - # Python 2 - >>> idna.alabel(u'测试') - 'xn--0zwm56d' - -Compatibility Mapping (UTS #46) -+++++++++++++++++++++++++++++++ - -As described in `RFC 5895 `_, the IDNA -specification no longer normalizes input from different potential ways a user -may input a domain name. This functionality, known as a “mapping”, is now -considered by the specification to be a local user-interface issue distinct -from IDNA conversion functionality. - -This library provides one such mapping, that was developed by the Unicode -Consortium. Known as `Unicode IDNA Compatibility Processing `_, -it provides for both a regular mapping for typical applications, as well as -a transitional mapping to help migrate from older IDNA 2003 applications. - -For example, “Königsgäßchen” is not a permissible label as *LATIN CAPITAL -LETTER K* is not allowed (nor are capital letters in general). UTS 46 will -convert this into lower case prior to applying the IDNA conversion. - -.. code-block:: pycon - - # Python 3 - >>> import idna - >>> idna.encode(u'Königsgäßchen') - ... - idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed - >>> idna.encode('Königsgäßchen', uts46=True) - b'xn--knigsgchen-b4a3dun' - >>> print(idna.decode('xn--knigsgchen-b4a3dun')) - königsgäßchen - -Transitional processing provides conversions to help transition from the older -2003 standard to the current standard. 
For example, in the original IDNA -specification, the *LATIN SMALL LETTER SHARP S* (ß) was converted into two -*LATIN SMALL LETTER S* (ss), whereas in the current IDNA specification this -conversion is not performed. - -.. code-block:: pycon - - # Python 2 - >>> idna.encode(u'Königsgäßchen', uts46=True, transitional=True) - 'xn--knigsgsschen-lcb0w' - -Implementors should use transitional processing with caution, only in rare -cases where conversion from legacy labels to current labels must be performed -(i.e. IDNA implementations that pre-date 2008). For typical applications -that just need to convert labels, transitional processing is unlikely to be -beneficial and could produce unexpected incompatible results. - -``encodings.idna`` Compatibility -++++++++++++++++++++++++++++++++ - -Function calls from the Python built-in ``encodings.idna`` module are -mapped to their IDNA 2008 equivalents using the ``idna.compat`` module. -Simply substitute the ``import`` clause in your code to refer to the -new module name. - -Exceptions ----------- - -All errors raised during the conversion following the specification should -raise an exception derived from the ``idna.IDNAError`` base class. - -More specific exceptions that may be generated as ``idna.IDNABidiError`` -when the error reflects an illegal combination of left-to-right and right-to-left -characters in a label; ``idna.InvalidCodepoint`` when a specific codepoint is -an illegal character in an IDN label (i.e. INVALID); and ``idna.InvalidCodepointContext`` -when the codepoint is illegal based on its positional context (i.e. it is CONTEXTO -or CONTEXTJ but the contextual requirements are not satisfied.) - -Building and Diagnostics ------------------------- - -The IDNA and UTS 46 functionality relies upon pre-calculated lookup tables for -performance. These tables are derived from computing against eligibility criteria -in the respective standards. These tables are computed using the command-line -script ``tools/idna-data``. - -This tool will fetch relevant tables from the Unicode Consortium and perform the -required calculations to identify eligibility. It has three main modes: - -* ``idna-data make-libdata``. Generates ``idnadata.py`` and ``uts46data.py``, - the pre-calculated lookup tables using for IDNA and UTS 46 conversions. Implementors - who wish to track this library against a different Unicode version may use this tool - to manually generate a different version of the ``idnadata.py`` and ``uts46data.py`` - files. - -* ``idna-data make-table``. Generate a table of the IDNA disposition - (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix B.1 of RFC - 5892 and the pre-computed tables published by `IANA `_. - -* ``idna-data U+0061``. Prints debugging output on the various properties - associated with an individual Unicode codepoint (in this case, U+0061), that are - used to assess the IDNA and UTS 46 status of a codepoint. This is helpful in debugging - or analysis. - -The tool accepts a number of arguments, described using ``idna-data -h``. Most notably, -the ``--version`` argument allows the specification of the version of Unicode to use -in computing the table data. For example, ``idna-data --version 9.0.0 make-libdata`` -will generate library data against Unicode 9.0.0. - -Note that this script requires Python 3, but all generated library data will work -in Python 2.7. 
- - -Testing -------- - -The library has a test suite based on each rule of the IDNA specification, as -well as tests that are provided as part of the Unicode Technical Standard 46, -`Unicode IDNA Compatibility Processing `_. - -The tests are run automatically on each commit at Travis CI: - -.. image:: https://travis-ci.org/kjd/idna.svg?branch=master - :target: https://travis-ci.org/kjd/idna - - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/idna-2.8.dist-info/RECORD b/.tox/py37-normal/lib/python3.7/site-packages/idna-2.8.dist-info/RECORD deleted file mode 100644 index 8d4bc61..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/idna-2.8.dist-info/RECORD +++ /dev/null @@ -1,22 +0,0 @@ -idna-2.8.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -idna-2.8.dist-info/LICENSE.rst,sha256=DUvHq9SNz7FOJCVO5AQGZzf_AWcUTiIpFKIRO4eUaD4,3947 -idna-2.8.dist-info/METADATA,sha256=X4QsM_BLMPhl4gC8SEnXjvl5-gj7hvwAl7UCyR418so,8862 -idna-2.8.dist-info/RECORD,, -idna-2.8.dist-info/WHEEL,sha256=CihQvCnsGZQBGAHLEUMf0IdA4fRduS_NBUTMgCTtvPM,110 -idna-2.8.dist-info/top_level.txt,sha256=jSag9sEDqvSPftxOQy-ABfGV_RSy7oFh4zZJpODV8k0,5 -idna/__init__.py,sha256=9Nt7xpyet3DmOrPUGooDdAwmHZZu1qUAy2EaJ93kGiQ,58 -idna/__pycache__/__init__.cpython-37.pyc,, -idna/__pycache__/codec.cpython-37.pyc,, -idna/__pycache__/compat.cpython-37.pyc,, -idna/__pycache__/core.cpython-37.pyc,, -idna/__pycache__/idnadata.cpython-37.pyc,, -idna/__pycache__/intranges.cpython-37.pyc,, -idna/__pycache__/package_data.cpython-37.pyc,, -idna/__pycache__/uts46data.cpython-37.pyc,, -idna/codec.py,sha256=lvYb7yu7PhAqFaAIAdWcwgaWI2UmgseUua-1c0AsG0A,3299 -idna/compat.py,sha256=R-h29D-6mrnJzbXxymrWUW7iZUvy-26TQwZ0ij57i4U,232 -idna/core.py,sha256=JDCZZ_PLESqIgEbU8mPyoEufWwoOiIqygA17-QZIe3s,11733 -idna/idnadata.py,sha256=HXaPFw6_YAJ0qppACPu0YLAULtRs3QovRM_CCZHGdY0,40899 -idna/intranges.py,sha256=TY1lpxZIQWEP6tNqjZkFA5hgoMWOj1OBmnUG8ihT87E,1749 -idna/package_data.py,sha256=kIzeKKXEouXLR4srqwf9Q3zv-NffKSOz5aSDOJARPB0,21 -idna/uts46data.py,sha256=oLyNZ1pBaiBlj9zFzLFRd_P7J8MkRcgDisjExZR_4MY,198292 diff --git a/.tox/py37-normal/lib/python3.7/site-packages/idna-2.8.dist-info/WHEEL b/.tox/py37-normal/lib/python3.7/site-packages/idna-2.8.dist-info/WHEEL deleted file mode 100644 index dea0e20..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/idna-2.8.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.32.2) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/idna-2.8.dist-info/top_level.txt b/.tox/py37-normal/lib/python3.7/site-packages/idna-2.8.dist-info/top_level.txt deleted file mode 100644 index c40472e..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/idna-2.8.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -idna diff --git a/.tox/py37-normal/lib/python3.7/site-packages/idna/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/idna/__init__.py deleted file mode 100644 index 847bf93..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/idna/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .package_data import __version__ -from .core import * diff --git a/.tox/py37-normal/lib/python3.7/site-packages/idna/codec.py b/.tox/py37-normal/lib/python3.7/site-packages/idna/codec.py deleted file mode 100644 index 98c65ea..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/idna/codec.py +++ /dev/null @@ -1,118 +0,0 @@ -from .core import encode, decode, alabel, ulabel, IDNAError 
-import codecs -import re - -_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') - -class Codec(codecs.Codec): - - def encode(self, data, errors='strict'): - - if errors != 'strict': - raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) - - if not data: - return "", 0 - - return encode(data), len(data) - - def decode(self, data, errors='strict'): - - if errors != 'strict': - raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) - - if not data: - return u"", 0 - - return decode(data), len(data) - -class IncrementalEncoder(codecs.BufferedIncrementalEncoder): - def _buffer_encode(self, data, errors, final): - if errors != 'strict': - raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) - - if not data: - return ("", 0) - - labels = _unicode_dots_re.split(data) - trailing_dot = u'' - if labels: - if not labels[-1]: - trailing_dot = '.' - del labels[-1] - elif not final: - # Keep potentially unfinished label until the next call - del labels[-1] - if labels: - trailing_dot = '.' - - result = [] - size = 0 - for label in labels: - result.append(alabel(label)) - if size: - size += 1 - size += len(label) - - # Join with U+002E - result = ".".join(result) + trailing_dot - size += len(trailing_dot) - return (result, size) - -class IncrementalDecoder(codecs.BufferedIncrementalDecoder): - def _buffer_decode(self, data, errors, final): - if errors != 'strict': - raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) - - if not data: - return (u"", 0) - - # IDNA allows decoding to operate on Unicode strings, too. - if isinstance(data, unicode): - labels = _unicode_dots_re.split(data) - else: - # Must be ASCII string - data = str(data) - unicode(data, "ascii") - labels = data.split(".") - - trailing_dot = u'' - if labels: - if not labels[-1]: - trailing_dot = u'.' - del labels[-1] - elif not final: - # Keep potentially unfinished label until the next call - del labels[-1] - if labels: - trailing_dot = u'.' - - result = [] - size = 0 - for label in labels: - result.append(ulabel(label)) - if size: - size += 1 - size += len(label) - - result = u".".join(result) + trailing_dot - size += len(trailing_dot) - return (result, size) - - -class StreamWriter(Codec, codecs.StreamWriter): - pass - -class StreamReader(Codec, codecs.StreamReader): - pass - -def getregentry(): - return codecs.CodecInfo( - name='idna', - encode=Codec().encode, - decode=Codec().decode, - incrementalencoder=IncrementalEncoder, - incrementaldecoder=IncrementalDecoder, - streamwriter=StreamWriter, - streamreader=StreamReader, - ) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/idna/compat.py b/.tox/py37-normal/lib/python3.7/site-packages/idna/compat.py deleted file mode 100644 index 4d47f33..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/idna/compat.py +++ /dev/null @@ -1,12 +0,0 @@ -from .core import * -from .codec import * - -def ToASCII(label): - return encode(label) - -def ToUnicode(label): - return decode(label) - -def nameprep(s): - raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/idna/core.py b/.tox/py37-normal/lib/python3.7/site-packages/idna/core.py deleted file mode 100644 index 104624a..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/idna/core.py +++ /dev/null @@ -1,396 +0,0 @@ -from . 
import idnadata -import bisect -import unicodedata -import re -import sys -from .intranges import intranges_contain - -_virama_combining_class = 9 -_alabel_prefix = b'xn--' -_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') - -if sys.version_info[0] == 3: - unicode = str - unichr = chr - -class IDNAError(UnicodeError): - """ Base exception for all IDNA-encoding related problems """ - pass - - -class IDNABidiError(IDNAError): - """ Exception when bidirectional requirements are not satisfied """ - pass - - -class InvalidCodepoint(IDNAError): - """ Exception when a disallowed or unallocated codepoint is used """ - pass - - -class InvalidCodepointContext(IDNAError): - """ Exception when the codepoint is not valid in the context it is used """ - pass - - -def _combining_class(cp): - v = unicodedata.combining(unichr(cp)) - if v == 0: - if not unicodedata.name(unichr(cp)): - raise ValueError("Unknown character in unicodedata") - return v - -def _is_script(cp, script): - return intranges_contain(ord(cp), idnadata.scripts[script]) - -def _punycode(s): - return s.encode('punycode') - -def _unot(s): - return 'U+{0:04X}'.format(s) - - -def valid_label_length(label): - - if len(label) > 63: - return False - return True - - -def valid_string_length(label, trailing_dot): - - if len(label) > (254 if trailing_dot else 253): - return False - return True - - -def check_bidi(label, check_ltr=False): - - # Bidi rules should only be applied if string contains RTL characters - bidi_label = False - for (idx, cp) in enumerate(label, 1): - direction = unicodedata.bidirectional(cp) - if direction == '': - # String likely comes from a newer version of Unicode - raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx)) - if direction in ['R', 'AL', 'AN']: - bidi_label = True - if not bidi_label and not check_ltr: - return True - - # Bidi rule 1 - direction = unicodedata.bidirectional(label[0]) - if direction in ['R', 'AL']: - rtl = True - elif direction == 'L': - rtl = False - else: - raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label))) - - valid_ending = False - number_type = False - for (idx, cp) in enumerate(label, 1): - direction = unicodedata.bidirectional(cp) - - if rtl: - # Bidi rule 2 - if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: - raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left label'.format(idx)) - # Bidi rule 3 - if direction in ['R', 'AL', 'EN', 'AN']: - valid_ending = True - elif direction != 'NSM': - valid_ending = False - # Bidi rule 4 - if direction in ['AN', 'EN']: - if not number_type: - number_type = direction - else: - if number_type != direction: - raise IDNABidiError('Can not mix numeral types in a right-to-left label') - else: - # Bidi rule 5 - if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: - raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx)) - # Bidi rule 6 - if direction in ['L', 'EN']: - valid_ending = True - elif direction != 'NSM': - valid_ending = False - - if not valid_ending: - raise IDNABidiError('Label ends with illegal codepoint directionality') - - return True - - -def check_initial_combiner(label): - - if unicodedata.category(label[0])[0] == 'M': - raise IDNAError('Label begins with an illegal combining character') - return True - - -def check_hyphen_ok(label): - - if label[2:4] == '--': - raise IDNAError('Label has 
disallowed hyphens in 3rd and 4th position') - if label[0] == '-' or label[-1] == '-': - raise IDNAError('Label must not start or end with a hyphen') - return True - - -def check_nfc(label): - - if unicodedata.normalize('NFC', label) != label: - raise IDNAError('Label must be in Normalization Form C') - - -def valid_contextj(label, pos): - - cp_value = ord(label[pos]) - - if cp_value == 0x200c: - - if pos > 0: - if _combining_class(ord(label[pos - 1])) == _virama_combining_class: - return True - - ok = False - for i in range(pos-1, -1, -1): - joining_type = idnadata.joining_types.get(ord(label[i])) - if joining_type == ord('T'): - continue - if joining_type in [ord('L'), ord('D')]: - ok = True - break - - if not ok: - return False - - ok = False - for i in range(pos+1, len(label)): - joining_type = idnadata.joining_types.get(ord(label[i])) - if joining_type == ord('T'): - continue - if joining_type in [ord('R'), ord('D')]: - ok = True - break - return ok - - if cp_value == 0x200d: - - if pos > 0: - if _combining_class(ord(label[pos - 1])) == _virama_combining_class: - return True - return False - - else: - - return False - - -def valid_contexto(label, pos, exception=False): - - cp_value = ord(label[pos]) - - if cp_value == 0x00b7: - if 0 < pos < len(label)-1: - if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: - return True - return False - - elif cp_value == 0x0375: - if pos < len(label)-1 and len(label) > 1: - return _is_script(label[pos + 1], 'Greek') - return False - - elif cp_value == 0x05f3 or cp_value == 0x05f4: - if pos > 0: - return _is_script(label[pos - 1], 'Hebrew') - return False - - elif cp_value == 0x30fb: - for cp in label: - if cp == u'\u30fb': - continue - if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'): - return True - return False - - elif 0x660 <= cp_value <= 0x669: - for cp in label: - if 0x6f0 <= ord(cp) <= 0x06f9: - return False - return True - - elif 0x6f0 <= cp_value <= 0x6f9: - for cp in label: - if 0x660 <= ord(cp) <= 0x0669: - return False - return True - - -def check_label(label): - - if isinstance(label, (bytes, bytearray)): - label = label.decode('utf-8') - if len(label) == 0: - raise IDNAError('Empty Label') - - check_nfc(label) - check_hyphen_ok(label) - check_initial_combiner(label) - - for (pos, cp) in enumerate(label): - cp_value = ord(cp) - if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): - continue - elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): - try: - if not valid_contextj(label, pos): - raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format( - _unot(cp_value), pos+1, repr(label))) - except ValueError: - raise IDNAError('Unknown codepoint adjacent to joiner {0} at position {1} in {2}'.format( - _unot(cp_value), pos+1, repr(label))) - elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): - if not valid_contexto(label, pos): - raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) - else: - raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label))) - - check_bidi(label) - - -def alabel(label): - - try: - label = label.encode('ascii') - ulabel(label) - if not valid_label_length(label): - raise IDNAError('Label too long') - return label - except UnicodeEncodeError: - pass - - if not label: - raise IDNAError('No Input') - - label = unicode(label) - check_label(label) - 
label = _punycode(label) - label = _alabel_prefix + label - - if not valid_label_length(label): - raise IDNAError('Label too long') - - return label - - -def ulabel(label): - - if not isinstance(label, (bytes, bytearray)): - try: - label = label.encode('ascii') - except UnicodeEncodeError: - check_label(label) - return label - - label = label.lower() - if label.startswith(_alabel_prefix): - label = label[len(_alabel_prefix):] - else: - check_label(label) - return label.decode('ascii') - - label = label.decode('punycode') - check_label(label) - return label - - -def uts46_remap(domain, std3_rules=True, transitional=False): - """Re-map the characters in the string according to UTS46 processing.""" - from .uts46data import uts46data - output = u"" - try: - for pos, char in enumerate(domain): - code_point = ord(char) - uts46row = uts46data[code_point if code_point < 256 else - bisect.bisect_left(uts46data, (code_point, "Z")) - 1] - status = uts46row[1] - replacement = uts46row[2] if len(uts46row) == 3 else None - if (status == "V" or - (status == "D" and not transitional) or - (status == "3" and not std3_rules and replacement is None)): - output += char - elif replacement is not None and (status == "M" or - (status == "3" and not std3_rules) or - (status == "D" and transitional)): - output += replacement - elif status != "I": - raise IndexError() - return unicodedata.normalize("NFC", output) - except IndexError: - raise InvalidCodepoint( - "Codepoint {0} not allowed at position {1} in {2}".format( - _unot(code_point), pos + 1, repr(domain))) - - -def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False): - - if isinstance(s, (bytes, bytearray)): - s = s.decode("ascii") - if uts46: - s = uts46_remap(s, std3_rules, transitional) - trailing_dot = False - result = [] - if strict: - labels = s.split('.') - else: - labels = _unicode_dots_re.split(s) - if not labels or labels == ['']: - raise IDNAError('Empty domain') - if labels[-1] == '': - del labels[-1] - trailing_dot = True - for label in labels: - s = alabel(label) - if s: - result.append(s) - else: - raise IDNAError('Empty label') - if trailing_dot: - result.append(b'') - s = b'.'.join(result) - if not valid_string_length(s, trailing_dot): - raise IDNAError('Domain too long') - return s - - -def decode(s, strict=False, uts46=False, std3_rules=False): - - if isinstance(s, (bytes, bytearray)): - s = s.decode("ascii") - if uts46: - s = uts46_remap(s, std3_rules, False) - trailing_dot = False - result = [] - if not strict: - labels = _unicode_dots_re.split(s) - else: - labels = s.split(u'.') - if not labels or labels == ['']: - raise IDNAError('Empty domain') - if not labels[-1]: - del labels[-1] - trailing_dot = True - for label in labels: - s = ulabel(label) - if s: - result.append(s) - else: - raise IDNAError('Empty label') - if trailing_dot: - result.append(u'') - return u'.'.join(result) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/idna/idnadata.py b/.tox/py37-normal/lib/python3.7/site-packages/idna/idnadata.py deleted file mode 100644 index a80c959..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/idna/idnadata.py +++ /dev/null @@ -1,1979 +0,0 @@ -# This file is automatically generated by tools/idna-data - -__version__ = "11.0.0" -scripts = { - 'Greek': ( - 0x37000000374, - 0x37500000378, - 0x37a0000037e, - 0x37f00000380, - 0x38400000385, - 0x38600000387, - 0x3880000038b, - 0x38c0000038d, - 0x38e000003a2, - 0x3a3000003e2, - 0x3f000000400, - 0x1d2600001d2b, - 0x1d5d00001d62, - 0x1d6600001d6b, - 
0x1dbf00001dc0, - 0x1f0000001f16, - 0x1f1800001f1e, - 0x1f2000001f46, - 0x1f4800001f4e, - 0x1f5000001f58, - 0x1f5900001f5a, - 0x1f5b00001f5c, - 0x1f5d00001f5e, - 0x1f5f00001f7e, - 0x1f8000001fb5, - 0x1fb600001fc5, - 0x1fc600001fd4, - 0x1fd600001fdc, - 0x1fdd00001ff0, - 0x1ff200001ff5, - 0x1ff600001fff, - 0x212600002127, - 0xab650000ab66, - 0x101400001018f, - 0x101a0000101a1, - 0x1d2000001d246, - ), - 'Han': ( - 0x2e8000002e9a, - 0x2e9b00002ef4, - 0x2f0000002fd6, - 0x300500003006, - 0x300700003008, - 0x30210000302a, - 0x30380000303c, - 0x340000004db6, - 0x4e0000009ff0, - 0xf9000000fa6e, - 0xfa700000fada, - 0x200000002a6d7, - 0x2a7000002b735, - 0x2b7400002b81e, - 0x2b8200002cea2, - 0x2ceb00002ebe1, - 0x2f8000002fa1e, - ), - 'Hebrew': ( - 0x591000005c8, - 0x5d0000005eb, - 0x5ef000005f5, - 0xfb1d0000fb37, - 0xfb380000fb3d, - 0xfb3e0000fb3f, - 0xfb400000fb42, - 0xfb430000fb45, - 0xfb460000fb50, - ), - 'Hiragana': ( - 0x304100003097, - 0x309d000030a0, - 0x1b0010001b11f, - 0x1f2000001f201, - ), - 'Katakana': ( - 0x30a1000030fb, - 0x30fd00003100, - 0x31f000003200, - 0x32d0000032ff, - 0x330000003358, - 0xff660000ff70, - 0xff710000ff9e, - 0x1b0000001b001, - ), -} -joining_types = { - 0x600: 85, - 0x601: 85, - 0x602: 85, - 0x603: 85, - 0x604: 85, - 0x605: 85, - 0x608: 85, - 0x60b: 85, - 0x620: 68, - 0x621: 85, - 0x622: 82, - 0x623: 82, - 0x624: 82, - 0x625: 82, - 0x626: 68, - 0x627: 82, - 0x628: 68, - 0x629: 82, - 0x62a: 68, - 0x62b: 68, - 0x62c: 68, - 0x62d: 68, - 0x62e: 68, - 0x62f: 82, - 0x630: 82, - 0x631: 82, - 0x632: 82, - 0x633: 68, - 0x634: 68, - 0x635: 68, - 0x636: 68, - 0x637: 68, - 0x638: 68, - 0x639: 68, - 0x63a: 68, - 0x63b: 68, - 0x63c: 68, - 0x63d: 68, - 0x63e: 68, - 0x63f: 68, - 0x640: 67, - 0x641: 68, - 0x642: 68, - 0x643: 68, - 0x644: 68, - 0x645: 68, - 0x646: 68, - 0x647: 68, - 0x648: 82, - 0x649: 68, - 0x64a: 68, - 0x66e: 68, - 0x66f: 68, - 0x671: 82, - 0x672: 82, - 0x673: 82, - 0x674: 85, - 0x675: 82, - 0x676: 82, - 0x677: 82, - 0x678: 68, - 0x679: 68, - 0x67a: 68, - 0x67b: 68, - 0x67c: 68, - 0x67d: 68, - 0x67e: 68, - 0x67f: 68, - 0x680: 68, - 0x681: 68, - 0x682: 68, - 0x683: 68, - 0x684: 68, - 0x685: 68, - 0x686: 68, - 0x687: 68, - 0x688: 82, - 0x689: 82, - 0x68a: 82, - 0x68b: 82, - 0x68c: 82, - 0x68d: 82, - 0x68e: 82, - 0x68f: 82, - 0x690: 82, - 0x691: 82, - 0x692: 82, - 0x693: 82, - 0x694: 82, - 0x695: 82, - 0x696: 82, - 0x697: 82, - 0x698: 82, - 0x699: 82, - 0x69a: 68, - 0x69b: 68, - 0x69c: 68, - 0x69d: 68, - 0x69e: 68, - 0x69f: 68, - 0x6a0: 68, - 0x6a1: 68, - 0x6a2: 68, - 0x6a3: 68, - 0x6a4: 68, - 0x6a5: 68, - 0x6a6: 68, - 0x6a7: 68, - 0x6a8: 68, - 0x6a9: 68, - 0x6aa: 68, - 0x6ab: 68, - 0x6ac: 68, - 0x6ad: 68, - 0x6ae: 68, - 0x6af: 68, - 0x6b0: 68, - 0x6b1: 68, - 0x6b2: 68, - 0x6b3: 68, - 0x6b4: 68, - 0x6b5: 68, - 0x6b6: 68, - 0x6b7: 68, - 0x6b8: 68, - 0x6b9: 68, - 0x6ba: 68, - 0x6bb: 68, - 0x6bc: 68, - 0x6bd: 68, - 0x6be: 68, - 0x6bf: 68, - 0x6c0: 82, - 0x6c1: 68, - 0x6c2: 68, - 0x6c3: 82, - 0x6c4: 82, - 0x6c5: 82, - 0x6c6: 82, - 0x6c7: 82, - 0x6c8: 82, - 0x6c9: 82, - 0x6ca: 82, - 0x6cb: 82, - 0x6cc: 68, - 0x6cd: 82, - 0x6ce: 68, - 0x6cf: 82, - 0x6d0: 68, - 0x6d1: 68, - 0x6d2: 82, - 0x6d3: 82, - 0x6d5: 82, - 0x6dd: 85, - 0x6ee: 82, - 0x6ef: 82, - 0x6fa: 68, - 0x6fb: 68, - 0x6fc: 68, - 0x6ff: 68, - 0x70f: 84, - 0x710: 82, - 0x712: 68, - 0x713: 68, - 0x714: 68, - 0x715: 82, - 0x716: 82, - 0x717: 82, - 0x718: 82, - 0x719: 82, - 0x71a: 68, - 0x71b: 68, - 0x71c: 68, - 0x71d: 68, - 0x71e: 82, - 0x71f: 68, - 0x720: 68, - 0x721: 68, - 0x722: 68, - 0x723: 68, - 0x724: 68, - 0x725: 
68, - 0x726: 68, - 0x727: 68, - 0x728: 82, - 0x729: 68, - 0x72a: 82, - 0x72b: 68, - 0x72c: 82, - 0x72d: 68, - 0x72e: 68, - 0x72f: 82, - 0x74d: 82, - 0x74e: 68, - 0x74f: 68, - 0x750: 68, - 0x751: 68, - 0x752: 68, - 0x753: 68, - 0x754: 68, - 0x755: 68, - 0x756: 68, - 0x757: 68, - 0x758: 68, - 0x759: 82, - 0x75a: 82, - 0x75b: 82, - 0x75c: 68, - 0x75d: 68, - 0x75e: 68, - 0x75f: 68, - 0x760: 68, - 0x761: 68, - 0x762: 68, - 0x763: 68, - 0x764: 68, - 0x765: 68, - 0x766: 68, - 0x767: 68, - 0x768: 68, - 0x769: 68, - 0x76a: 68, - 0x76b: 82, - 0x76c: 82, - 0x76d: 68, - 0x76e: 68, - 0x76f: 68, - 0x770: 68, - 0x771: 82, - 0x772: 68, - 0x773: 82, - 0x774: 82, - 0x775: 68, - 0x776: 68, - 0x777: 68, - 0x778: 82, - 0x779: 82, - 0x77a: 68, - 0x77b: 68, - 0x77c: 68, - 0x77d: 68, - 0x77e: 68, - 0x77f: 68, - 0x7ca: 68, - 0x7cb: 68, - 0x7cc: 68, - 0x7cd: 68, - 0x7ce: 68, - 0x7cf: 68, - 0x7d0: 68, - 0x7d1: 68, - 0x7d2: 68, - 0x7d3: 68, - 0x7d4: 68, - 0x7d5: 68, - 0x7d6: 68, - 0x7d7: 68, - 0x7d8: 68, - 0x7d9: 68, - 0x7da: 68, - 0x7db: 68, - 0x7dc: 68, - 0x7dd: 68, - 0x7de: 68, - 0x7df: 68, - 0x7e0: 68, - 0x7e1: 68, - 0x7e2: 68, - 0x7e3: 68, - 0x7e4: 68, - 0x7e5: 68, - 0x7e6: 68, - 0x7e7: 68, - 0x7e8: 68, - 0x7e9: 68, - 0x7ea: 68, - 0x7fa: 67, - 0x840: 82, - 0x841: 68, - 0x842: 68, - 0x843: 68, - 0x844: 68, - 0x845: 68, - 0x846: 82, - 0x847: 82, - 0x848: 68, - 0x849: 82, - 0x84a: 68, - 0x84b: 68, - 0x84c: 68, - 0x84d: 68, - 0x84e: 68, - 0x84f: 68, - 0x850: 68, - 0x851: 68, - 0x852: 68, - 0x853: 68, - 0x854: 82, - 0x855: 68, - 0x856: 85, - 0x857: 85, - 0x858: 85, - 0x860: 68, - 0x861: 85, - 0x862: 68, - 0x863: 68, - 0x864: 68, - 0x865: 68, - 0x866: 85, - 0x867: 82, - 0x868: 68, - 0x869: 82, - 0x86a: 82, - 0x8a0: 68, - 0x8a1: 68, - 0x8a2: 68, - 0x8a3: 68, - 0x8a4: 68, - 0x8a5: 68, - 0x8a6: 68, - 0x8a7: 68, - 0x8a8: 68, - 0x8a9: 68, - 0x8aa: 82, - 0x8ab: 82, - 0x8ac: 82, - 0x8ad: 85, - 0x8ae: 82, - 0x8af: 68, - 0x8b0: 68, - 0x8b1: 82, - 0x8b2: 82, - 0x8b3: 68, - 0x8b4: 68, - 0x8b6: 68, - 0x8b7: 68, - 0x8b8: 68, - 0x8b9: 82, - 0x8ba: 68, - 0x8bb: 68, - 0x8bc: 68, - 0x8bd: 68, - 0x8e2: 85, - 0x1806: 85, - 0x1807: 68, - 0x180a: 67, - 0x180e: 85, - 0x1820: 68, - 0x1821: 68, - 0x1822: 68, - 0x1823: 68, - 0x1824: 68, - 0x1825: 68, - 0x1826: 68, - 0x1827: 68, - 0x1828: 68, - 0x1829: 68, - 0x182a: 68, - 0x182b: 68, - 0x182c: 68, - 0x182d: 68, - 0x182e: 68, - 0x182f: 68, - 0x1830: 68, - 0x1831: 68, - 0x1832: 68, - 0x1833: 68, - 0x1834: 68, - 0x1835: 68, - 0x1836: 68, - 0x1837: 68, - 0x1838: 68, - 0x1839: 68, - 0x183a: 68, - 0x183b: 68, - 0x183c: 68, - 0x183d: 68, - 0x183e: 68, - 0x183f: 68, - 0x1840: 68, - 0x1841: 68, - 0x1842: 68, - 0x1843: 68, - 0x1844: 68, - 0x1845: 68, - 0x1846: 68, - 0x1847: 68, - 0x1848: 68, - 0x1849: 68, - 0x184a: 68, - 0x184b: 68, - 0x184c: 68, - 0x184d: 68, - 0x184e: 68, - 0x184f: 68, - 0x1850: 68, - 0x1851: 68, - 0x1852: 68, - 0x1853: 68, - 0x1854: 68, - 0x1855: 68, - 0x1856: 68, - 0x1857: 68, - 0x1858: 68, - 0x1859: 68, - 0x185a: 68, - 0x185b: 68, - 0x185c: 68, - 0x185d: 68, - 0x185e: 68, - 0x185f: 68, - 0x1860: 68, - 0x1861: 68, - 0x1862: 68, - 0x1863: 68, - 0x1864: 68, - 0x1865: 68, - 0x1866: 68, - 0x1867: 68, - 0x1868: 68, - 0x1869: 68, - 0x186a: 68, - 0x186b: 68, - 0x186c: 68, - 0x186d: 68, - 0x186e: 68, - 0x186f: 68, - 0x1870: 68, - 0x1871: 68, - 0x1872: 68, - 0x1873: 68, - 0x1874: 68, - 0x1875: 68, - 0x1876: 68, - 0x1877: 68, - 0x1878: 68, - 0x1880: 85, - 0x1881: 85, - 0x1882: 85, - 0x1883: 85, - 0x1884: 85, - 0x1885: 84, - 0x1886: 84, - 0x1887: 68, - 0x1888: 68, - 0x1889: 68, - 0x188a: 68, - 
0x188b: 68, - 0x188c: 68, - 0x188d: 68, - 0x188e: 68, - 0x188f: 68, - 0x1890: 68, - 0x1891: 68, - 0x1892: 68, - 0x1893: 68, - 0x1894: 68, - 0x1895: 68, - 0x1896: 68, - 0x1897: 68, - 0x1898: 68, - 0x1899: 68, - 0x189a: 68, - 0x189b: 68, - 0x189c: 68, - 0x189d: 68, - 0x189e: 68, - 0x189f: 68, - 0x18a0: 68, - 0x18a1: 68, - 0x18a2: 68, - 0x18a3: 68, - 0x18a4: 68, - 0x18a5: 68, - 0x18a6: 68, - 0x18a7: 68, - 0x18a8: 68, - 0x18aa: 68, - 0x200c: 85, - 0x200d: 67, - 0x202f: 85, - 0x2066: 85, - 0x2067: 85, - 0x2068: 85, - 0x2069: 85, - 0xa840: 68, - 0xa841: 68, - 0xa842: 68, - 0xa843: 68, - 0xa844: 68, - 0xa845: 68, - 0xa846: 68, - 0xa847: 68, - 0xa848: 68, - 0xa849: 68, - 0xa84a: 68, - 0xa84b: 68, - 0xa84c: 68, - 0xa84d: 68, - 0xa84e: 68, - 0xa84f: 68, - 0xa850: 68, - 0xa851: 68, - 0xa852: 68, - 0xa853: 68, - 0xa854: 68, - 0xa855: 68, - 0xa856: 68, - 0xa857: 68, - 0xa858: 68, - 0xa859: 68, - 0xa85a: 68, - 0xa85b: 68, - 0xa85c: 68, - 0xa85d: 68, - 0xa85e: 68, - 0xa85f: 68, - 0xa860: 68, - 0xa861: 68, - 0xa862: 68, - 0xa863: 68, - 0xa864: 68, - 0xa865: 68, - 0xa866: 68, - 0xa867: 68, - 0xa868: 68, - 0xa869: 68, - 0xa86a: 68, - 0xa86b: 68, - 0xa86c: 68, - 0xa86d: 68, - 0xa86e: 68, - 0xa86f: 68, - 0xa870: 68, - 0xa871: 68, - 0xa872: 76, - 0xa873: 85, - 0x10ac0: 68, - 0x10ac1: 68, - 0x10ac2: 68, - 0x10ac3: 68, - 0x10ac4: 68, - 0x10ac5: 82, - 0x10ac6: 85, - 0x10ac7: 82, - 0x10ac8: 85, - 0x10ac9: 82, - 0x10aca: 82, - 0x10acb: 85, - 0x10acc: 85, - 0x10acd: 76, - 0x10ace: 82, - 0x10acf: 82, - 0x10ad0: 82, - 0x10ad1: 82, - 0x10ad2: 82, - 0x10ad3: 68, - 0x10ad4: 68, - 0x10ad5: 68, - 0x10ad6: 68, - 0x10ad7: 76, - 0x10ad8: 68, - 0x10ad9: 68, - 0x10ada: 68, - 0x10adb: 68, - 0x10adc: 68, - 0x10add: 82, - 0x10ade: 68, - 0x10adf: 68, - 0x10ae0: 68, - 0x10ae1: 82, - 0x10ae2: 85, - 0x10ae3: 85, - 0x10ae4: 82, - 0x10aeb: 68, - 0x10aec: 68, - 0x10aed: 68, - 0x10aee: 68, - 0x10aef: 82, - 0x10b80: 68, - 0x10b81: 82, - 0x10b82: 68, - 0x10b83: 82, - 0x10b84: 82, - 0x10b85: 82, - 0x10b86: 68, - 0x10b87: 68, - 0x10b88: 68, - 0x10b89: 82, - 0x10b8a: 68, - 0x10b8b: 68, - 0x10b8c: 82, - 0x10b8d: 68, - 0x10b8e: 82, - 0x10b8f: 82, - 0x10b90: 68, - 0x10b91: 82, - 0x10ba9: 82, - 0x10baa: 82, - 0x10bab: 82, - 0x10bac: 82, - 0x10bad: 68, - 0x10bae: 68, - 0x10baf: 85, - 0x10d00: 76, - 0x10d01: 68, - 0x10d02: 68, - 0x10d03: 68, - 0x10d04: 68, - 0x10d05: 68, - 0x10d06: 68, - 0x10d07: 68, - 0x10d08: 68, - 0x10d09: 68, - 0x10d0a: 68, - 0x10d0b: 68, - 0x10d0c: 68, - 0x10d0d: 68, - 0x10d0e: 68, - 0x10d0f: 68, - 0x10d10: 68, - 0x10d11: 68, - 0x10d12: 68, - 0x10d13: 68, - 0x10d14: 68, - 0x10d15: 68, - 0x10d16: 68, - 0x10d17: 68, - 0x10d18: 68, - 0x10d19: 68, - 0x10d1a: 68, - 0x10d1b: 68, - 0x10d1c: 68, - 0x10d1d: 68, - 0x10d1e: 68, - 0x10d1f: 68, - 0x10d20: 68, - 0x10d21: 68, - 0x10d22: 82, - 0x10d23: 68, - 0x10f30: 68, - 0x10f31: 68, - 0x10f32: 68, - 0x10f33: 82, - 0x10f34: 68, - 0x10f35: 68, - 0x10f36: 68, - 0x10f37: 68, - 0x10f38: 68, - 0x10f39: 68, - 0x10f3a: 68, - 0x10f3b: 68, - 0x10f3c: 68, - 0x10f3d: 68, - 0x10f3e: 68, - 0x10f3f: 68, - 0x10f40: 68, - 0x10f41: 68, - 0x10f42: 68, - 0x10f43: 68, - 0x10f44: 68, - 0x10f45: 85, - 0x10f51: 68, - 0x10f52: 68, - 0x10f53: 68, - 0x10f54: 82, - 0x110bd: 85, - 0x110cd: 85, - 0x1e900: 68, - 0x1e901: 68, - 0x1e902: 68, - 0x1e903: 68, - 0x1e904: 68, - 0x1e905: 68, - 0x1e906: 68, - 0x1e907: 68, - 0x1e908: 68, - 0x1e909: 68, - 0x1e90a: 68, - 0x1e90b: 68, - 0x1e90c: 68, - 0x1e90d: 68, - 0x1e90e: 68, - 0x1e90f: 68, - 0x1e910: 68, - 0x1e911: 68, - 0x1e912: 68, - 0x1e913: 68, - 0x1e914: 68, - 0x1e915: 68, - 
0x1e916: 68, - 0x1e917: 68, - 0x1e918: 68, - 0x1e919: 68, - 0x1e91a: 68, - 0x1e91b: 68, - 0x1e91c: 68, - 0x1e91d: 68, - 0x1e91e: 68, - 0x1e91f: 68, - 0x1e920: 68, - 0x1e921: 68, - 0x1e922: 68, - 0x1e923: 68, - 0x1e924: 68, - 0x1e925: 68, - 0x1e926: 68, - 0x1e927: 68, - 0x1e928: 68, - 0x1e929: 68, - 0x1e92a: 68, - 0x1e92b: 68, - 0x1e92c: 68, - 0x1e92d: 68, - 0x1e92e: 68, - 0x1e92f: 68, - 0x1e930: 68, - 0x1e931: 68, - 0x1e932: 68, - 0x1e933: 68, - 0x1e934: 68, - 0x1e935: 68, - 0x1e936: 68, - 0x1e937: 68, - 0x1e938: 68, - 0x1e939: 68, - 0x1e93a: 68, - 0x1e93b: 68, - 0x1e93c: 68, - 0x1e93d: 68, - 0x1e93e: 68, - 0x1e93f: 68, - 0x1e940: 68, - 0x1e941: 68, - 0x1e942: 68, - 0x1e943: 68, -} -codepoint_classes = { - 'PVALID': ( - 0x2d0000002e, - 0x300000003a, - 0x610000007b, - 0xdf000000f7, - 0xf800000100, - 0x10100000102, - 0x10300000104, - 0x10500000106, - 0x10700000108, - 0x1090000010a, - 0x10b0000010c, - 0x10d0000010e, - 0x10f00000110, - 0x11100000112, - 0x11300000114, - 0x11500000116, - 0x11700000118, - 0x1190000011a, - 0x11b0000011c, - 0x11d0000011e, - 0x11f00000120, - 0x12100000122, - 0x12300000124, - 0x12500000126, - 0x12700000128, - 0x1290000012a, - 0x12b0000012c, - 0x12d0000012e, - 0x12f00000130, - 0x13100000132, - 0x13500000136, - 0x13700000139, - 0x13a0000013b, - 0x13c0000013d, - 0x13e0000013f, - 0x14200000143, - 0x14400000145, - 0x14600000147, - 0x14800000149, - 0x14b0000014c, - 0x14d0000014e, - 0x14f00000150, - 0x15100000152, - 0x15300000154, - 0x15500000156, - 0x15700000158, - 0x1590000015a, - 0x15b0000015c, - 0x15d0000015e, - 0x15f00000160, - 0x16100000162, - 0x16300000164, - 0x16500000166, - 0x16700000168, - 0x1690000016a, - 0x16b0000016c, - 0x16d0000016e, - 0x16f00000170, - 0x17100000172, - 0x17300000174, - 0x17500000176, - 0x17700000178, - 0x17a0000017b, - 0x17c0000017d, - 0x17e0000017f, - 0x18000000181, - 0x18300000184, - 0x18500000186, - 0x18800000189, - 0x18c0000018e, - 0x19200000193, - 0x19500000196, - 0x1990000019c, - 0x19e0000019f, - 0x1a1000001a2, - 0x1a3000001a4, - 0x1a5000001a6, - 0x1a8000001a9, - 0x1aa000001ac, - 0x1ad000001ae, - 0x1b0000001b1, - 0x1b4000001b5, - 0x1b6000001b7, - 0x1b9000001bc, - 0x1bd000001c4, - 0x1ce000001cf, - 0x1d0000001d1, - 0x1d2000001d3, - 0x1d4000001d5, - 0x1d6000001d7, - 0x1d8000001d9, - 0x1da000001db, - 0x1dc000001de, - 0x1df000001e0, - 0x1e1000001e2, - 0x1e3000001e4, - 0x1e5000001e6, - 0x1e7000001e8, - 0x1e9000001ea, - 0x1eb000001ec, - 0x1ed000001ee, - 0x1ef000001f1, - 0x1f5000001f6, - 0x1f9000001fa, - 0x1fb000001fc, - 0x1fd000001fe, - 0x1ff00000200, - 0x20100000202, - 0x20300000204, - 0x20500000206, - 0x20700000208, - 0x2090000020a, - 0x20b0000020c, - 0x20d0000020e, - 0x20f00000210, - 0x21100000212, - 0x21300000214, - 0x21500000216, - 0x21700000218, - 0x2190000021a, - 0x21b0000021c, - 0x21d0000021e, - 0x21f00000220, - 0x22100000222, - 0x22300000224, - 0x22500000226, - 0x22700000228, - 0x2290000022a, - 0x22b0000022c, - 0x22d0000022e, - 0x22f00000230, - 0x23100000232, - 0x2330000023a, - 0x23c0000023d, - 0x23f00000241, - 0x24200000243, - 0x24700000248, - 0x2490000024a, - 0x24b0000024c, - 0x24d0000024e, - 0x24f000002b0, - 0x2b9000002c2, - 0x2c6000002d2, - 0x2ec000002ed, - 0x2ee000002ef, - 0x30000000340, - 0x34200000343, - 0x3460000034f, - 0x35000000370, - 0x37100000372, - 0x37300000374, - 0x37700000378, - 0x37b0000037e, - 0x39000000391, - 0x3ac000003cf, - 0x3d7000003d8, - 0x3d9000003da, - 0x3db000003dc, - 0x3dd000003de, - 0x3df000003e0, - 0x3e1000003e2, - 0x3e3000003e4, - 0x3e5000003e6, - 0x3e7000003e8, - 0x3e9000003ea, - 0x3eb000003ec, - 
0x3ed000003ee, - 0x3ef000003f0, - 0x3f3000003f4, - 0x3f8000003f9, - 0x3fb000003fd, - 0x43000000460, - 0x46100000462, - 0x46300000464, - 0x46500000466, - 0x46700000468, - 0x4690000046a, - 0x46b0000046c, - 0x46d0000046e, - 0x46f00000470, - 0x47100000472, - 0x47300000474, - 0x47500000476, - 0x47700000478, - 0x4790000047a, - 0x47b0000047c, - 0x47d0000047e, - 0x47f00000480, - 0x48100000482, - 0x48300000488, - 0x48b0000048c, - 0x48d0000048e, - 0x48f00000490, - 0x49100000492, - 0x49300000494, - 0x49500000496, - 0x49700000498, - 0x4990000049a, - 0x49b0000049c, - 0x49d0000049e, - 0x49f000004a0, - 0x4a1000004a2, - 0x4a3000004a4, - 0x4a5000004a6, - 0x4a7000004a8, - 0x4a9000004aa, - 0x4ab000004ac, - 0x4ad000004ae, - 0x4af000004b0, - 0x4b1000004b2, - 0x4b3000004b4, - 0x4b5000004b6, - 0x4b7000004b8, - 0x4b9000004ba, - 0x4bb000004bc, - 0x4bd000004be, - 0x4bf000004c0, - 0x4c2000004c3, - 0x4c4000004c5, - 0x4c6000004c7, - 0x4c8000004c9, - 0x4ca000004cb, - 0x4cc000004cd, - 0x4ce000004d0, - 0x4d1000004d2, - 0x4d3000004d4, - 0x4d5000004d6, - 0x4d7000004d8, - 0x4d9000004da, - 0x4db000004dc, - 0x4dd000004de, - 0x4df000004e0, - 0x4e1000004e2, - 0x4e3000004e4, - 0x4e5000004e6, - 0x4e7000004e8, - 0x4e9000004ea, - 0x4eb000004ec, - 0x4ed000004ee, - 0x4ef000004f0, - 0x4f1000004f2, - 0x4f3000004f4, - 0x4f5000004f6, - 0x4f7000004f8, - 0x4f9000004fa, - 0x4fb000004fc, - 0x4fd000004fe, - 0x4ff00000500, - 0x50100000502, - 0x50300000504, - 0x50500000506, - 0x50700000508, - 0x5090000050a, - 0x50b0000050c, - 0x50d0000050e, - 0x50f00000510, - 0x51100000512, - 0x51300000514, - 0x51500000516, - 0x51700000518, - 0x5190000051a, - 0x51b0000051c, - 0x51d0000051e, - 0x51f00000520, - 0x52100000522, - 0x52300000524, - 0x52500000526, - 0x52700000528, - 0x5290000052a, - 0x52b0000052c, - 0x52d0000052e, - 0x52f00000530, - 0x5590000055a, - 0x56000000587, - 0x58800000589, - 0x591000005be, - 0x5bf000005c0, - 0x5c1000005c3, - 0x5c4000005c6, - 0x5c7000005c8, - 0x5d0000005eb, - 0x5ef000005f3, - 0x6100000061b, - 0x62000000640, - 0x64100000660, - 0x66e00000675, - 0x679000006d4, - 0x6d5000006dd, - 0x6df000006e9, - 0x6ea000006f0, - 0x6fa00000700, - 0x7100000074b, - 0x74d000007b2, - 0x7c0000007f6, - 0x7fd000007fe, - 0x8000000082e, - 0x8400000085c, - 0x8600000086b, - 0x8a0000008b5, - 0x8b6000008be, - 0x8d3000008e2, - 0x8e300000958, - 0x96000000964, - 0x96600000970, - 0x97100000984, - 0x9850000098d, - 0x98f00000991, - 0x993000009a9, - 0x9aa000009b1, - 0x9b2000009b3, - 0x9b6000009ba, - 0x9bc000009c5, - 0x9c7000009c9, - 0x9cb000009cf, - 0x9d7000009d8, - 0x9e0000009e4, - 0x9e6000009f2, - 0x9fc000009fd, - 0x9fe000009ff, - 0xa0100000a04, - 0xa0500000a0b, - 0xa0f00000a11, - 0xa1300000a29, - 0xa2a00000a31, - 0xa3200000a33, - 0xa3500000a36, - 0xa3800000a3a, - 0xa3c00000a3d, - 0xa3e00000a43, - 0xa4700000a49, - 0xa4b00000a4e, - 0xa5100000a52, - 0xa5c00000a5d, - 0xa6600000a76, - 0xa8100000a84, - 0xa8500000a8e, - 0xa8f00000a92, - 0xa9300000aa9, - 0xaaa00000ab1, - 0xab200000ab4, - 0xab500000aba, - 0xabc00000ac6, - 0xac700000aca, - 0xacb00000ace, - 0xad000000ad1, - 0xae000000ae4, - 0xae600000af0, - 0xaf900000b00, - 0xb0100000b04, - 0xb0500000b0d, - 0xb0f00000b11, - 0xb1300000b29, - 0xb2a00000b31, - 0xb3200000b34, - 0xb3500000b3a, - 0xb3c00000b45, - 0xb4700000b49, - 0xb4b00000b4e, - 0xb5600000b58, - 0xb5f00000b64, - 0xb6600000b70, - 0xb7100000b72, - 0xb8200000b84, - 0xb8500000b8b, - 0xb8e00000b91, - 0xb9200000b96, - 0xb9900000b9b, - 0xb9c00000b9d, - 0xb9e00000ba0, - 0xba300000ba5, - 0xba800000bab, - 0xbae00000bba, - 0xbbe00000bc3, - 0xbc600000bc9, - 0xbca00000bce, - 
0xbd000000bd1, - 0xbd700000bd8, - 0xbe600000bf0, - 0xc0000000c0d, - 0xc0e00000c11, - 0xc1200000c29, - 0xc2a00000c3a, - 0xc3d00000c45, - 0xc4600000c49, - 0xc4a00000c4e, - 0xc5500000c57, - 0xc5800000c5b, - 0xc6000000c64, - 0xc6600000c70, - 0xc8000000c84, - 0xc8500000c8d, - 0xc8e00000c91, - 0xc9200000ca9, - 0xcaa00000cb4, - 0xcb500000cba, - 0xcbc00000cc5, - 0xcc600000cc9, - 0xcca00000cce, - 0xcd500000cd7, - 0xcde00000cdf, - 0xce000000ce4, - 0xce600000cf0, - 0xcf100000cf3, - 0xd0000000d04, - 0xd0500000d0d, - 0xd0e00000d11, - 0xd1200000d45, - 0xd4600000d49, - 0xd4a00000d4f, - 0xd5400000d58, - 0xd5f00000d64, - 0xd6600000d70, - 0xd7a00000d80, - 0xd8200000d84, - 0xd8500000d97, - 0xd9a00000db2, - 0xdb300000dbc, - 0xdbd00000dbe, - 0xdc000000dc7, - 0xdca00000dcb, - 0xdcf00000dd5, - 0xdd600000dd7, - 0xdd800000de0, - 0xde600000df0, - 0xdf200000df4, - 0xe0100000e33, - 0xe3400000e3b, - 0xe4000000e4f, - 0xe5000000e5a, - 0xe8100000e83, - 0xe8400000e85, - 0xe8700000e89, - 0xe8a00000e8b, - 0xe8d00000e8e, - 0xe9400000e98, - 0xe9900000ea0, - 0xea100000ea4, - 0xea500000ea6, - 0xea700000ea8, - 0xeaa00000eac, - 0xead00000eb3, - 0xeb400000eba, - 0xebb00000ebe, - 0xec000000ec5, - 0xec600000ec7, - 0xec800000ece, - 0xed000000eda, - 0xede00000ee0, - 0xf0000000f01, - 0xf0b00000f0c, - 0xf1800000f1a, - 0xf2000000f2a, - 0xf3500000f36, - 0xf3700000f38, - 0xf3900000f3a, - 0xf3e00000f43, - 0xf4400000f48, - 0xf4900000f4d, - 0xf4e00000f52, - 0xf5300000f57, - 0xf5800000f5c, - 0xf5d00000f69, - 0xf6a00000f6d, - 0xf7100000f73, - 0xf7400000f75, - 0xf7a00000f81, - 0xf8200000f85, - 0xf8600000f93, - 0xf9400000f98, - 0xf9900000f9d, - 0xf9e00000fa2, - 0xfa300000fa7, - 0xfa800000fac, - 0xfad00000fb9, - 0xfba00000fbd, - 0xfc600000fc7, - 0x10000000104a, - 0x10500000109e, - 0x10d0000010fb, - 0x10fd00001100, - 0x120000001249, - 0x124a0000124e, - 0x125000001257, - 0x125800001259, - 0x125a0000125e, - 0x126000001289, - 0x128a0000128e, - 0x1290000012b1, - 0x12b2000012b6, - 0x12b8000012bf, - 0x12c0000012c1, - 0x12c2000012c6, - 0x12c8000012d7, - 0x12d800001311, - 0x131200001316, - 0x13180000135b, - 0x135d00001360, - 0x138000001390, - 0x13a0000013f6, - 0x14010000166d, - 0x166f00001680, - 0x16810000169b, - 0x16a0000016eb, - 0x16f1000016f9, - 0x17000000170d, - 0x170e00001715, - 0x172000001735, - 0x174000001754, - 0x17600000176d, - 0x176e00001771, - 0x177200001774, - 0x1780000017b4, - 0x17b6000017d4, - 0x17d7000017d8, - 0x17dc000017de, - 0x17e0000017ea, - 0x18100000181a, - 0x182000001879, - 0x1880000018ab, - 0x18b0000018f6, - 0x19000000191f, - 0x19200000192c, - 0x19300000193c, - 0x19460000196e, - 0x197000001975, - 0x1980000019ac, - 0x19b0000019ca, - 0x19d0000019da, - 0x1a0000001a1c, - 0x1a2000001a5f, - 0x1a6000001a7d, - 0x1a7f00001a8a, - 0x1a9000001a9a, - 0x1aa700001aa8, - 0x1ab000001abe, - 0x1b0000001b4c, - 0x1b5000001b5a, - 0x1b6b00001b74, - 0x1b8000001bf4, - 0x1c0000001c38, - 0x1c4000001c4a, - 0x1c4d00001c7e, - 0x1cd000001cd3, - 0x1cd400001cfa, - 0x1d0000001d2c, - 0x1d2f00001d30, - 0x1d3b00001d3c, - 0x1d4e00001d4f, - 0x1d6b00001d78, - 0x1d7900001d9b, - 0x1dc000001dfa, - 0x1dfb00001e00, - 0x1e0100001e02, - 0x1e0300001e04, - 0x1e0500001e06, - 0x1e0700001e08, - 0x1e0900001e0a, - 0x1e0b00001e0c, - 0x1e0d00001e0e, - 0x1e0f00001e10, - 0x1e1100001e12, - 0x1e1300001e14, - 0x1e1500001e16, - 0x1e1700001e18, - 0x1e1900001e1a, - 0x1e1b00001e1c, - 0x1e1d00001e1e, - 0x1e1f00001e20, - 0x1e2100001e22, - 0x1e2300001e24, - 0x1e2500001e26, - 0x1e2700001e28, - 0x1e2900001e2a, - 0x1e2b00001e2c, - 0x1e2d00001e2e, - 0x1e2f00001e30, - 0x1e3100001e32, - 0x1e3300001e34, - 
0x1e3500001e36, - 0x1e3700001e38, - 0x1e3900001e3a, - 0x1e3b00001e3c, - 0x1e3d00001e3e, - 0x1e3f00001e40, - 0x1e4100001e42, - 0x1e4300001e44, - 0x1e4500001e46, - 0x1e4700001e48, - 0x1e4900001e4a, - 0x1e4b00001e4c, - 0x1e4d00001e4e, - 0x1e4f00001e50, - 0x1e5100001e52, - 0x1e5300001e54, - 0x1e5500001e56, - 0x1e5700001e58, - 0x1e5900001e5a, - 0x1e5b00001e5c, - 0x1e5d00001e5e, - 0x1e5f00001e60, - 0x1e6100001e62, - 0x1e6300001e64, - 0x1e6500001e66, - 0x1e6700001e68, - 0x1e6900001e6a, - 0x1e6b00001e6c, - 0x1e6d00001e6e, - 0x1e6f00001e70, - 0x1e7100001e72, - 0x1e7300001e74, - 0x1e7500001e76, - 0x1e7700001e78, - 0x1e7900001e7a, - 0x1e7b00001e7c, - 0x1e7d00001e7e, - 0x1e7f00001e80, - 0x1e8100001e82, - 0x1e8300001e84, - 0x1e8500001e86, - 0x1e8700001e88, - 0x1e8900001e8a, - 0x1e8b00001e8c, - 0x1e8d00001e8e, - 0x1e8f00001e90, - 0x1e9100001e92, - 0x1e9300001e94, - 0x1e9500001e9a, - 0x1e9c00001e9e, - 0x1e9f00001ea0, - 0x1ea100001ea2, - 0x1ea300001ea4, - 0x1ea500001ea6, - 0x1ea700001ea8, - 0x1ea900001eaa, - 0x1eab00001eac, - 0x1ead00001eae, - 0x1eaf00001eb0, - 0x1eb100001eb2, - 0x1eb300001eb4, - 0x1eb500001eb6, - 0x1eb700001eb8, - 0x1eb900001eba, - 0x1ebb00001ebc, - 0x1ebd00001ebe, - 0x1ebf00001ec0, - 0x1ec100001ec2, - 0x1ec300001ec4, - 0x1ec500001ec6, - 0x1ec700001ec8, - 0x1ec900001eca, - 0x1ecb00001ecc, - 0x1ecd00001ece, - 0x1ecf00001ed0, - 0x1ed100001ed2, - 0x1ed300001ed4, - 0x1ed500001ed6, - 0x1ed700001ed8, - 0x1ed900001eda, - 0x1edb00001edc, - 0x1edd00001ede, - 0x1edf00001ee0, - 0x1ee100001ee2, - 0x1ee300001ee4, - 0x1ee500001ee6, - 0x1ee700001ee8, - 0x1ee900001eea, - 0x1eeb00001eec, - 0x1eed00001eee, - 0x1eef00001ef0, - 0x1ef100001ef2, - 0x1ef300001ef4, - 0x1ef500001ef6, - 0x1ef700001ef8, - 0x1ef900001efa, - 0x1efb00001efc, - 0x1efd00001efe, - 0x1eff00001f08, - 0x1f1000001f16, - 0x1f2000001f28, - 0x1f3000001f38, - 0x1f4000001f46, - 0x1f5000001f58, - 0x1f6000001f68, - 0x1f7000001f71, - 0x1f7200001f73, - 0x1f7400001f75, - 0x1f7600001f77, - 0x1f7800001f79, - 0x1f7a00001f7b, - 0x1f7c00001f7d, - 0x1fb000001fb2, - 0x1fb600001fb7, - 0x1fc600001fc7, - 0x1fd000001fd3, - 0x1fd600001fd8, - 0x1fe000001fe3, - 0x1fe400001fe8, - 0x1ff600001ff7, - 0x214e0000214f, - 0x218400002185, - 0x2c3000002c5f, - 0x2c6100002c62, - 0x2c6500002c67, - 0x2c6800002c69, - 0x2c6a00002c6b, - 0x2c6c00002c6d, - 0x2c7100002c72, - 0x2c7300002c75, - 0x2c7600002c7c, - 0x2c8100002c82, - 0x2c8300002c84, - 0x2c8500002c86, - 0x2c8700002c88, - 0x2c8900002c8a, - 0x2c8b00002c8c, - 0x2c8d00002c8e, - 0x2c8f00002c90, - 0x2c9100002c92, - 0x2c9300002c94, - 0x2c9500002c96, - 0x2c9700002c98, - 0x2c9900002c9a, - 0x2c9b00002c9c, - 0x2c9d00002c9e, - 0x2c9f00002ca0, - 0x2ca100002ca2, - 0x2ca300002ca4, - 0x2ca500002ca6, - 0x2ca700002ca8, - 0x2ca900002caa, - 0x2cab00002cac, - 0x2cad00002cae, - 0x2caf00002cb0, - 0x2cb100002cb2, - 0x2cb300002cb4, - 0x2cb500002cb6, - 0x2cb700002cb8, - 0x2cb900002cba, - 0x2cbb00002cbc, - 0x2cbd00002cbe, - 0x2cbf00002cc0, - 0x2cc100002cc2, - 0x2cc300002cc4, - 0x2cc500002cc6, - 0x2cc700002cc8, - 0x2cc900002cca, - 0x2ccb00002ccc, - 0x2ccd00002cce, - 0x2ccf00002cd0, - 0x2cd100002cd2, - 0x2cd300002cd4, - 0x2cd500002cd6, - 0x2cd700002cd8, - 0x2cd900002cda, - 0x2cdb00002cdc, - 0x2cdd00002cde, - 0x2cdf00002ce0, - 0x2ce100002ce2, - 0x2ce300002ce5, - 0x2cec00002ced, - 0x2cee00002cf2, - 0x2cf300002cf4, - 0x2d0000002d26, - 0x2d2700002d28, - 0x2d2d00002d2e, - 0x2d3000002d68, - 0x2d7f00002d97, - 0x2da000002da7, - 0x2da800002daf, - 0x2db000002db7, - 0x2db800002dbf, - 0x2dc000002dc7, - 0x2dc800002dcf, - 0x2dd000002dd7, - 0x2dd800002ddf, - 
0x2de000002e00, - 0x2e2f00002e30, - 0x300500003008, - 0x302a0000302e, - 0x303c0000303d, - 0x304100003097, - 0x30990000309b, - 0x309d0000309f, - 0x30a1000030fb, - 0x30fc000030ff, - 0x310500003130, - 0x31a0000031bb, - 0x31f000003200, - 0x340000004db6, - 0x4e0000009ff0, - 0xa0000000a48d, - 0xa4d00000a4fe, - 0xa5000000a60d, - 0xa6100000a62c, - 0xa6410000a642, - 0xa6430000a644, - 0xa6450000a646, - 0xa6470000a648, - 0xa6490000a64a, - 0xa64b0000a64c, - 0xa64d0000a64e, - 0xa64f0000a650, - 0xa6510000a652, - 0xa6530000a654, - 0xa6550000a656, - 0xa6570000a658, - 0xa6590000a65a, - 0xa65b0000a65c, - 0xa65d0000a65e, - 0xa65f0000a660, - 0xa6610000a662, - 0xa6630000a664, - 0xa6650000a666, - 0xa6670000a668, - 0xa6690000a66a, - 0xa66b0000a66c, - 0xa66d0000a670, - 0xa6740000a67e, - 0xa67f0000a680, - 0xa6810000a682, - 0xa6830000a684, - 0xa6850000a686, - 0xa6870000a688, - 0xa6890000a68a, - 0xa68b0000a68c, - 0xa68d0000a68e, - 0xa68f0000a690, - 0xa6910000a692, - 0xa6930000a694, - 0xa6950000a696, - 0xa6970000a698, - 0xa6990000a69a, - 0xa69b0000a69c, - 0xa69e0000a6e6, - 0xa6f00000a6f2, - 0xa7170000a720, - 0xa7230000a724, - 0xa7250000a726, - 0xa7270000a728, - 0xa7290000a72a, - 0xa72b0000a72c, - 0xa72d0000a72e, - 0xa72f0000a732, - 0xa7330000a734, - 0xa7350000a736, - 0xa7370000a738, - 0xa7390000a73a, - 0xa73b0000a73c, - 0xa73d0000a73e, - 0xa73f0000a740, - 0xa7410000a742, - 0xa7430000a744, - 0xa7450000a746, - 0xa7470000a748, - 0xa7490000a74a, - 0xa74b0000a74c, - 0xa74d0000a74e, - 0xa74f0000a750, - 0xa7510000a752, - 0xa7530000a754, - 0xa7550000a756, - 0xa7570000a758, - 0xa7590000a75a, - 0xa75b0000a75c, - 0xa75d0000a75e, - 0xa75f0000a760, - 0xa7610000a762, - 0xa7630000a764, - 0xa7650000a766, - 0xa7670000a768, - 0xa7690000a76a, - 0xa76b0000a76c, - 0xa76d0000a76e, - 0xa76f0000a770, - 0xa7710000a779, - 0xa77a0000a77b, - 0xa77c0000a77d, - 0xa77f0000a780, - 0xa7810000a782, - 0xa7830000a784, - 0xa7850000a786, - 0xa7870000a789, - 0xa78c0000a78d, - 0xa78e0000a790, - 0xa7910000a792, - 0xa7930000a796, - 0xa7970000a798, - 0xa7990000a79a, - 0xa79b0000a79c, - 0xa79d0000a79e, - 0xa79f0000a7a0, - 0xa7a10000a7a2, - 0xa7a30000a7a4, - 0xa7a50000a7a6, - 0xa7a70000a7a8, - 0xa7a90000a7aa, - 0xa7af0000a7b0, - 0xa7b50000a7b6, - 0xa7b70000a7b8, - 0xa7b90000a7ba, - 0xa7f70000a7f8, - 0xa7fa0000a828, - 0xa8400000a874, - 0xa8800000a8c6, - 0xa8d00000a8da, - 0xa8e00000a8f8, - 0xa8fb0000a8fc, - 0xa8fd0000a92e, - 0xa9300000a954, - 0xa9800000a9c1, - 0xa9cf0000a9da, - 0xa9e00000a9ff, - 0xaa000000aa37, - 0xaa400000aa4e, - 0xaa500000aa5a, - 0xaa600000aa77, - 0xaa7a0000aac3, - 0xaadb0000aade, - 0xaae00000aaf0, - 0xaaf20000aaf7, - 0xab010000ab07, - 0xab090000ab0f, - 0xab110000ab17, - 0xab200000ab27, - 0xab280000ab2f, - 0xab300000ab5b, - 0xab600000ab66, - 0xabc00000abeb, - 0xabec0000abee, - 0xabf00000abfa, - 0xac000000d7a4, - 0xfa0e0000fa10, - 0xfa110000fa12, - 0xfa130000fa15, - 0xfa1f0000fa20, - 0xfa210000fa22, - 0xfa230000fa25, - 0xfa270000fa2a, - 0xfb1e0000fb1f, - 0xfe200000fe30, - 0xfe730000fe74, - 0x100000001000c, - 0x1000d00010027, - 0x100280001003b, - 0x1003c0001003e, - 0x1003f0001004e, - 0x100500001005e, - 0x10080000100fb, - 0x101fd000101fe, - 0x102800001029d, - 0x102a0000102d1, - 0x102e0000102e1, - 0x1030000010320, - 0x1032d00010341, - 0x103420001034a, - 0x103500001037b, - 0x103800001039e, - 0x103a0000103c4, - 0x103c8000103d0, - 0x104280001049e, - 0x104a0000104aa, - 0x104d8000104fc, - 0x1050000010528, - 0x1053000010564, - 0x1060000010737, - 0x1074000010756, - 0x1076000010768, - 0x1080000010806, - 0x1080800010809, - 0x1080a00010836, - 
0x1083700010839, - 0x1083c0001083d, - 0x1083f00010856, - 0x1086000010877, - 0x108800001089f, - 0x108e0000108f3, - 0x108f4000108f6, - 0x1090000010916, - 0x109200001093a, - 0x10980000109b8, - 0x109be000109c0, - 0x10a0000010a04, - 0x10a0500010a07, - 0x10a0c00010a14, - 0x10a1500010a18, - 0x10a1900010a36, - 0x10a3800010a3b, - 0x10a3f00010a40, - 0x10a6000010a7d, - 0x10a8000010a9d, - 0x10ac000010ac8, - 0x10ac900010ae7, - 0x10b0000010b36, - 0x10b4000010b56, - 0x10b6000010b73, - 0x10b8000010b92, - 0x10c0000010c49, - 0x10cc000010cf3, - 0x10d0000010d28, - 0x10d3000010d3a, - 0x10f0000010f1d, - 0x10f2700010f28, - 0x10f3000010f51, - 0x1100000011047, - 0x1106600011070, - 0x1107f000110bb, - 0x110d0000110e9, - 0x110f0000110fa, - 0x1110000011135, - 0x1113600011140, - 0x1114400011147, - 0x1115000011174, - 0x1117600011177, - 0x11180000111c5, - 0x111c9000111cd, - 0x111d0000111db, - 0x111dc000111dd, - 0x1120000011212, - 0x1121300011238, - 0x1123e0001123f, - 0x1128000011287, - 0x1128800011289, - 0x1128a0001128e, - 0x1128f0001129e, - 0x1129f000112a9, - 0x112b0000112eb, - 0x112f0000112fa, - 0x1130000011304, - 0x113050001130d, - 0x1130f00011311, - 0x1131300011329, - 0x1132a00011331, - 0x1133200011334, - 0x113350001133a, - 0x1133b00011345, - 0x1134700011349, - 0x1134b0001134e, - 0x1135000011351, - 0x1135700011358, - 0x1135d00011364, - 0x113660001136d, - 0x1137000011375, - 0x114000001144b, - 0x114500001145a, - 0x1145e0001145f, - 0x11480000114c6, - 0x114c7000114c8, - 0x114d0000114da, - 0x11580000115b6, - 0x115b8000115c1, - 0x115d8000115de, - 0x1160000011641, - 0x1164400011645, - 0x116500001165a, - 0x11680000116b8, - 0x116c0000116ca, - 0x117000001171b, - 0x1171d0001172c, - 0x117300001173a, - 0x118000001183b, - 0x118c0000118ea, - 0x118ff00011900, - 0x11a0000011a3f, - 0x11a4700011a48, - 0x11a5000011a84, - 0x11a8600011a9a, - 0x11a9d00011a9e, - 0x11ac000011af9, - 0x11c0000011c09, - 0x11c0a00011c37, - 0x11c3800011c41, - 0x11c5000011c5a, - 0x11c7200011c90, - 0x11c9200011ca8, - 0x11ca900011cb7, - 0x11d0000011d07, - 0x11d0800011d0a, - 0x11d0b00011d37, - 0x11d3a00011d3b, - 0x11d3c00011d3e, - 0x11d3f00011d48, - 0x11d5000011d5a, - 0x11d6000011d66, - 0x11d6700011d69, - 0x11d6a00011d8f, - 0x11d9000011d92, - 0x11d9300011d99, - 0x11da000011daa, - 0x11ee000011ef7, - 0x120000001239a, - 0x1248000012544, - 0x130000001342f, - 0x1440000014647, - 0x1680000016a39, - 0x16a4000016a5f, - 0x16a6000016a6a, - 0x16ad000016aee, - 0x16af000016af5, - 0x16b0000016b37, - 0x16b4000016b44, - 0x16b5000016b5a, - 0x16b6300016b78, - 0x16b7d00016b90, - 0x16e6000016e80, - 0x16f0000016f45, - 0x16f5000016f7f, - 0x16f8f00016fa0, - 0x16fe000016fe2, - 0x17000000187f2, - 0x1880000018af3, - 0x1b0000001b11f, - 0x1b1700001b2fc, - 0x1bc000001bc6b, - 0x1bc700001bc7d, - 0x1bc800001bc89, - 0x1bc900001bc9a, - 0x1bc9d0001bc9f, - 0x1da000001da37, - 0x1da3b0001da6d, - 0x1da750001da76, - 0x1da840001da85, - 0x1da9b0001daa0, - 0x1daa10001dab0, - 0x1e0000001e007, - 0x1e0080001e019, - 0x1e01b0001e022, - 0x1e0230001e025, - 0x1e0260001e02b, - 0x1e8000001e8c5, - 0x1e8d00001e8d7, - 0x1e9220001e94b, - 0x1e9500001e95a, - 0x200000002a6d7, - 0x2a7000002b735, - 0x2b7400002b81e, - 0x2b8200002cea2, - 0x2ceb00002ebe1, - ), - 'CONTEXTJ': ( - 0x200c0000200e, - ), - 'CONTEXTO': ( - 0xb7000000b8, - 0x37500000376, - 0x5f3000005f5, - 0x6600000066a, - 0x6f0000006fa, - 0x30fb000030fc, - ), -} diff --git a/.tox/py37-normal/lib/python3.7/site-packages/idna/intranges.py b/.tox/py37-normal/lib/python3.7/site-packages/idna/intranges.py deleted file mode 100644 index fa8a735..0000000 --- 
a/.tox/py37-normal/lib/python3.7/site-packages/idna/intranges.py +++ /dev/null @@ -1,53 +0,0 @@ -""" -Given a list of integers, made up of (hopefully) a small number of long runs -of consecutive integers, compute a representation of the form -((start1, end1), (start2, end2) ...). Then answer the question "was x present -in the original list?" in time O(log(# runs)). -""" - -import bisect - -def intranges_from_list(list_): - """Represent a list of integers as a sequence of ranges: - ((start_0, end_0), (start_1, end_1), ...), such that the original - integers are exactly those x such that start_i <= x < end_i for some i. - - Ranges are encoded as single integers (start << 32 | end), not as tuples. - """ - - sorted_list = sorted(list_) - ranges = [] - last_write = -1 - for i in range(len(sorted_list)): - if i+1 < len(sorted_list): - if sorted_list[i] == sorted_list[i+1]-1: - continue - current_range = sorted_list[last_write+1:i+1] - ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) - last_write = i - - return tuple(ranges) - -def _encode_range(start, end): - return (start << 32) | end - -def _decode_range(r): - return (r >> 32), (r & ((1 << 32) - 1)) - - -def intranges_contain(int_, ranges): - """Determine if `int_` falls into one of the ranges in `ranges`.""" - tuple_ = _encode_range(int_, 0) - pos = bisect.bisect_left(ranges, tuple_) - # we could be immediately ahead of a tuple (start, end) - # with start < int_ <= end - if pos > 0: - left, right = _decode_range(ranges[pos-1]) - if left <= int_ < right: - return True - # or we could be immediately behind a tuple (int_, end) - if pos < len(ranges): - left, _ = _decode_range(ranges[pos]) - if left == int_: - return True - return False diff --git a/.tox/py37-normal/lib/python3.7/site-packages/idna/package_data.py b/.tox/py37-normal/lib/python3.7/site-packages/idna/package_data.py deleted file mode 100644 index 257e898..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/idna/package_data.py +++ /dev/null @@ -1,2 +0,0 @@ -__version__ = '2.8' - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/idna/uts46data.py b/.tox/py37-normal/lib/python3.7/site-packages/idna/uts46data.py deleted file mode 100644 index a68ed4c..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/idna/uts46data.py +++ /dev/null @@ -1,8205 +0,0 @@ -# This file is automatically generated by tools/idna-data -# vim: set fileencoding=utf-8 : - -"""IDNA Mapping Table from UTS46.""" - - -__version__ = "11.0.0" -def _seg_0(): - return [ - (0x0, '3'), - (0x1, '3'), - (0x2, '3'), - (0x3, '3'), - (0x4, '3'), - (0x5, '3'), - (0x6, '3'), - (0x7, '3'), - (0x8, '3'), - (0x9, '3'), - (0xA, '3'), - (0xB, '3'), - (0xC, '3'), - (0xD, '3'), - (0xE, '3'), - (0xF, '3'), - (0x10, '3'), - (0x11, '3'), - (0x12, '3'), - (0x13, '3'), - (0x14, '3'), - (0x15, '3'), - (0x16, '3'), - (0x17, '3'), - (0x18, '3'), - (0x19, '3'), - (0x1A, '3'), - (0x1B, '3'), - (0x1C, '3'), - (0x1D, '3'), - (0x1E, '3'), - (0x1F, '3'), - (0x20, '3'), - (0x21, '3'), - (0x22, '3'), - (0x23, '3'), - (0x24, '3'), - (0x25, '3'), - (0x26, '3'), - (0x27, '3'), - (0x28, '3'), - (0x29, '3'), - (0x2A, '3'), - (0x2B, '3'), - (0x2C, '3'), - (0x2D, 'V'), - (0x2E, 'V'), - (0x2F, '3'), - (0x30, 'V'), - (0x31, 'V'), - (0x32, 'V'), - (0x33, 'V'), - (0x34, 'V'), - (0x35, 'V'), - (0x36, 'V'), - (0x37, 'V'), - (0x38, 'V'), - (0x39, 'V'), - (0x3A, '3'), - (0x3B, '3'), - (0x3C, '3'), - (0x3D, '3'), - (0x3E, '3'), - (0x3F, '3'), - (0x40, '3'), - (0x41, 'M', u'a'), - (0x42, 'M', u'b'), - (0x43, 'M', u'c'), - 
(0x44, 'M', u'd'), - (0x45, 'M', u'e'), - (0x46, 'M', u'f'), - (0x47, 'M', u'g'), - (0x48, 'M', u'h'), - (0x49, 'M', u'i'), - (0x4A, 'M', u'j'), - (0x4B, 'M', u'k'), - (0x4C, 'M', u'l'), - (0x4D, 'M', u'm'), - (0x4E, 'M', u'n'), - (0x4F, 'M', u'o'), - (0x50, 'M', u'p'), - (0x51, 'M', u'q'), - (0x52, 'M', u'r'), - (0x53, 'M', u's'), - (0x54, 'M', u't'), - (0x55, 'M', u'u'), - (0x56, 'M', u'v'), - (0x57, 'M', u'w'), - (0x58, 'M', u'x'), - (0x59, 'M', u'y'), - (0x5A, 'M', u'z'), - (0x5B, '3'), - (0x5C, '3'), - (0x5D, '3'), - (0x5E, '3'), - (0x5F, '3'), - (0x60, '3'), - (0x61, 'V'), - (0x62, 'V'), - (0x63, 'V'), - ] - -def _seg_1(): - return [ - (0x64, 'V'), - (0x65, 'V'), - (0x66, 'V'), - (0x67, 'V'), - (0x68, 'V'), - (0x69, 'V'), - (0x6A, 'V'), - (0x6B, 'V'), - (0x6C, 'V'), - (0x6D, 'V'), - (0x6E, 'V'), - (0x6F, 'V'), - (0x70, 'V'), - (0x71, 'V'), - (0x72, 'V'), - (0x73, 'V'), - (0x74, 'V'), - (0x75, 'V'), - (0x76, 'V'), - (0x77, 'V'), - (0x78, 'V'), - (0x79, 'V'), - (0x7A, 'V'), - (0x7B, '3'), - (0x7C, '3'), - (0x7D, '3'), - (0x7E, '3'), - (0x7F, '3'), - (0x80, 'X'), - (0x81, 'X'), - (0x82, 'X'), - (0x83, 'X'), - (0x84, 'X'), - (0x85, 'X'), - (0x86, 'X'), - (0x87, 'X'), - (0x88, 'X'), - (0x89, 'X'), - (0x8A, 'X'), - (0x8B, 'X'), - (0x8C, 'X'), - (0x8D, 'X'), - (0x8E, 'X'), - (0x8F, 'X'), - (0x90, 'X'), - (0x91, 'X'), - (0x92, 'X'), - (0x93, 'X'), - (0x94, 'X'), - (0x95, 'X'), - (0x96, 'X'), - (0x97, 'X'), - (0x98, 'X'), - (0x99, 'X'), - (0x9A, 'X'), - (0x9B, 'X'), - (0x9C, 'X'), - (0x9D, 'X'), - (0x9E, 'X'), - (0x9F, 'X'), - (0xA0, '3', u' '), - (0xA1, 'V'), - (0xA2, 'V'), - (0xA3, 'V'), - (0xA4, 'V'), - (0xA5, 'V'), - (0xA6, 'V'), - (0xA7, 'V'), - (0xA8, '3', u' ̈'), - (0xA9, 'V'), - (0xAA, 'M', u'a'), - (0xAB, 'V'), - (0xAC, 'V'), - (0xAD, 'I'), - (0xAE, 'V'), - (0xAF, '3', u' ̄'), - (0xB0, 'V'), - (0xB1, 'V'), - (0xB2, 'M', u'2'), - (0xB3, 'M', u'3'), - (0xB4, '3', u' ́'), - (0xB5, 'M', u'μ'), - (0xB6, 'V'), - (0xB7, 'V'), - (0xB8, '3', u' ̧'), - (0xB9, 'M', u'1'), - (0xBA, 'M', u'o'), - (0xBB, 'V'), - (0xBC, 'M', u'1⁄4'), - (0xBD, 'M', u'1⁄2'), - (0xBE, 'M', u'3⁄4'), - (0xBF, 'V'), - (0xC0, 'M', u'à'), - (0xC1, 'M', u'á'), - (0xC2, 'M', u'â'), - (0xC3, 'M', u'ã'), - (0xC4, 'M', u'ä'), - (0xC5, 'M', u'å'), - (0xC6, 'M', u'æ'), - (0xC7, 'M', u'ç'), - ] - -def _seg_2(): - return [ - (0xC8, 'M', u'è'), - (0xC9, 'M', u'é'), - (0xCA, 'M', u'ê'), - (0xCB, 'M', u'ë'), - (0xCC, 'M', u'ì'), - (0xCD, 'M', u'í'), - (0xCE, 'M', u'î'), - (0xCF, 'M', u'ï'), - (0xD0, 'M', u'ð'), - (0xD1, 'M', u'ñ'), - (0xD2, 'M', u'ò'), - (0xD3, 'M', u'ó'), - (0xD4, 'M', u'ô'), - (0xD5, 'M', u'õ'), - (0xD6, 'M', u'ö'), - (0xD7, 'V'), - (0xD8, 'M', u'ø'), - (0xD9, 'M', u'ù'), - (0xDA, 'M', u'ú'), - (0xDB, 'M', u'û'), - (0xDC, 'M', u'ü'), - (0xDD, 'M', u'ý'), - (0xDE, 'M', u'þ'), - (0xDF, 'D', u'ss'), - (0xE0, 'V'), - (0xE1, 'V'), - (0xE2, 'V'), - (0xE3, 'V'), - (0xE4, 'V'), - (0xE5, 'V'), - (0xE6, 'V'), - (0xE7, 'V'), - (0xE8, 'V'), - (0xE9, 'V'), - (0xEA, 'V'), - (0xEB, 'V'), - (0xEC, 'V'), - (0xED, 'V'), - (0xEE, 'V'), - (0xEF, 'V'), - (0xF0, 'V'), - (0xF1, 'V'), - (0xF2, 'V'), - (0xF3, 'V'), - (0xF4, 'V'), - (0xF5, 'V'), - (0xF6, 'V'), - (0xF7, 'V'), - (0xF8, 'V'), - (0xF9, 'V'), - (0xFA, 'V'), - (0xFB, 'V'), - (0xFC, 'V'), - (0xFD, 'V'), - (0xFE, 'V'), - (0xFF, 'V'), - (0x100, 'M', u'ā'), - (0x101, 'V'), - (0x102, 'M', u'ă'), - (0x103, 'V'), - (0x104, 'M', u'ą'), - (0x105, 'V'), - (0x106, 'M', u'ć'), - (0x107, 'V'), - (0x108, 'M', u'ĉ'), - (0x109, 'V'), - (0x10A, 'M', u'ċ'), - (0x10B, 'V'), - (0x10C, 'M', u'č'), - 
[… collapsed: the remainder of this deleted, auto-generated file is the idna package's UTS 46 mapping data — segments _seg_2() through _seg_37() returning lists of (code point, status[, mapping]) tuples, covering roughly U+010D through U+AB7A in this excerpt — thousands of removed data lines omitted …]
'M', u'Ꭺ'), - ] - -def _seg_38(): - return [ - (0xAB7B, 'M', u'Ꭻ'), - (0xAB7C, 'M', u'Ꭼ'), - (0xAB7D, 'M', u'Ꭽ'), - (0xAB7E, 'M', u'Ꭾ'), - (0xAB7F, 'M', u'Ꭿ'), - (0xAB80, 'M', u'Ꮀ'), - (0xAB81, 'M', u'Ꮁ'), - (0xAB82, 'M', u'Ꮂ'), - (0xAB83, 'M', u'Ꮃ'), - (0xAB84, 'M', u'Ꮄ'), - (0xAB85, 'M', u'Ꮅ'), - (0xAB86, 'M', u'Ꮆ'), - (0xAB87, 'M', u'Ꮇ'), - (0xAB88, 'M', u'Ꮈ'), - (0xAB89, 'M', u'Ꮉ'), - (0xAB8A, 'M', u'Ꮊ'), - (0xAB8B, 'M', u'Ꮋ'), - (0xAB8C, 'M', u'Ꮌ'), - (0xAB8D, 'M', u'Ꮍ'), - (0xAB8E, 'M', u'Ꮎ'), - (0xAB8F, 'M', u'Ꮏ'), - (0xAB90, 'M', u'Ꮐ'), - (0xAB91, 'M', u'Ꮑ'), - (0xAB92, 'M', u'Ꮒ'), - (0xAB93, 'M', u'Ꮓ'), - (0xAB94, 'M', u'Ꮔ'), - (0xAB95, 'M', u'Ꮕ'), - (0xAB96, 'M', u'Ꮖ'), - (0xAB97, 'M', u'Ꮗ'), - (0xAB98, 'M', u'Ꮘ'), - (0xAB99, 'M', u'Ꮙ'), - (0xAB9A, 'M', u'Ꮚ'), - (0xAB9B, 'M', u'Ꮛ'), - (0xAB9C, 'M', u'Ꮜ'), - (0xAB9D, 'M', u'Ꮝ'), - (0xAB9E, 'M', u'Ꮞ'), - (0xAB9F, 'M', u'Ꮟ'), - (0xABA0, 'M', u'Ꮠ'), - (0xABA1, 'M', u'Ꮡ'), - (0xABA2, 'M', u'Ꮢ'), - (0xABA3, 'M', u'Ꮣ'), - (0xABA4, 'M', u'Ꮤ'), - (0xABA5, 'M', u'Ꮥ'), - (0xABA6, 'M', u'Ꮦ'), - (0xABA7, 'M', u'Ꮧ'), - (0xABA8, 'M', u'Ꮨ'), - (0xABA9, 'M', u'Ꮩ'), - (0xABAA, 'M', u'Ꮪ'), - (0xABAB, 'M', u'Ꮫ'), - (0xABAC, 'M', u'Ꮬ'), - (0xABAD, 'M', u'Ꮭ'), - (0xABAE, 'M', u'Ꮮ'), - (0xABAF, 'M', u'Ꮯ'), - (0xABB0, 'M', u'Ꮰ'), - (0xABB1, 'M', u'Ꮱ'), - (0xABB2, 'M', u'Ꮲ'), - (0xABB3, 'M', u'Ꮳ'), - (0xABB4, 'M', u'Ꮴ'), - (0xABB5, 'M', u'Ꮵ'), - (0xABB6, 'M', u'Ꮶ'), - (0xABB7, 'M', u'Ꮷ'), - (0xABB8, 'M', u'Ꮸ'), - (0xABB9, 'M', u'Ꮹ'), - (0xABBA, 'M', u'Ꮺ'), - (0xABBB, 'M', u'Ꮻ'), - (0xABBC, 'M', u'Ꮼ'), - (0xABBD, 'M', u'Ꮽ'), - (0xABBE, 'M', u'Ꮾ'), - (0xABBF, 'M', u'Ꮿ'), - (0xABC0, 'V'), - (0xABEE, 'X'), - (0xABF0, 'V'), - (0xABFA, 'X'), - (0xAC00, 'V'), - (0xD7A4, 'X'), - (0xD7B0, 'V'), - (0xD7C7, 'X'), - (0xD7CB, 'V'), - (0xD7FC, 'X'), - (0xF900, 'M', u'豈'), - (0xF901, 'M', u'更'), - (0xF902, 'M', u'車'), - (0xF903, 'M', u'賈'), - (0xF904, 'M', u'滑'), - (0xF905, 'M', u'串'), - (0xF906, 'M', u'句'), - (0xF907, 'M', u'龜'), - (0xF909, 'M', u'契'), - (0xF90A, 'M', u'金'), - (0xF90B, 'M', u'喇'), - (0xF90C, 'M', u'奈'), - (0xF90D, 'M', u'懶'), - (0xF90E, 'M', u'癩'), - (0xF90F, 'M', u'羅'), - (0xF910, 'M', u'蘿'), - (0xF911, 'M', u'螺'), - (0xF912, 'M', u'裸'), - (0xF913, 'M', u'邏'), - (0xF914, 'M', u'樂'), - (0xF915, 'M', u'洛'), - ] - -def _seg_39(): - return [ - (0xF916, 'M', u'烙'), - (0xF917, 'M', u'珞'), - (0xF918, 'M', u'落'), - (0xF919, 'M', u'酪'), - (0xF91A, 'M', u'駱'), - (0xF91B, 'M', u'亂'), - (0xF91C, 'M', u'卵'), - (0xF91D, 'M', u'欄'), - (0xF91E, 'M', u'爛'), - (0xF91F, 'M', u'蘭'), - (0xF920, 'M', u'鸞'), - (0xF921, 'M', u'嵐'), - (0xF922, 'M', u'濫'), - (0xF923, 'M', u'藍'), - (0xF924, 'M', u'襤'), - (0xF925, 'M', u'拉'), - (0xF926, 'M', u'臘'), - (0xF927, 'M', u'蠟'), - (0xF928, 'M', u'廊'), - (0xF929, 'M', u'朗'), - (0xF92A, 'M', u'浪'), - (0xF92B, 'M', u'狼'), - (0xF92C, 'M', u'郎'), - (0xF92D, 'M', u'來'), - (0xF92E, 'M', u'冷'), - (0xF92F, 'M', u'勞'), - (0xF930, 'M', u'擄'), - (0xF931, 'M', u'櫓'), - (0xF932, 'M', u'爐'), - (0xF933, 'M', u'盧'), - (0xF934, 'M', u'老'), - (0xF935, 'M', u'蘆'), - (0xF936, 'M', u'虜'), - (0xF937, 'M', u'路'), - (0xF938, 'M', u'露'), - (0xF939, 'M', u'魯'), - (0xF93A, 'M', u'鷺'), - (0xF93B, 'M', u'碌'), - (0xF93C, 'M', u'祿'), - (0xF93D, 'M', u'綠'), - (0xF93E, 'M', u'菉'), - (0xF93F, 'M', u'錄'), - (0xF940, 'M', u'鹿'), - (0xF941, 'M', u'論'), - (0xF942, 'M', u'壟'), - (0xF943, 'M', u'弄'), - (0xF944, 'M', u'籠'), - (0xF945, 'M', u'聾'), - (0xF946, 'M', u'牢'), - (0xF947, 'M', u'磊'), - (0xF948, 'M', u'賂'), - (0xF949, 'M', u'雷'), - (0xF94A, 'M', u'壘'), - (0xF94B, 'M', 
u'屢'), - (0xF94C, 'M', u'樓'), - (0xF94D, 'M', u'淚'), - (0xF94E, 'M', u'漏'), - (0xF94F, 'M', u'累'), - (0xF950, 'M', u'縷'), - (0xF951, 'M', u'陋'), - (0xF952, 'M', u'勒'), - (0xF953, 'M', u'肋'), - (0xF954, 'M', u'凜'), - (0xF955, 'M', u'凌'), - (0xF956, 'M', u'稜'), - (0xF957, 'M', u'綾'), - (0xF958, 'M', u'菱'), - (0xF959, 'M', u'陵'), - (0xF95A, 'M', u'讀'), - (0xF95B, 'M', u'拏'), - (0xF95C, 'M', u'樂'), - (0xF95D, 'M', u'諾'), - (0xF95E, 'M', u'丹'), - (0xF95F, 'M', u'寧'), - (0xF960, 'M', u'怒'), - (0xF961, 'M', u'率'), - (0xF962, 'M', u'異'), - (0xF963, 'M', u'北'), - (0xF964, 'M', u'磻'), - (0xF965, 'M', u'便'), - (0xF966, 'M', u'復'), - (0xF967, 'M', u'不'), - (0xF968, 'M', u'泌'), - (0xF969, 'M', u'數'), - (0xF96A, 'M', u'索'), - (0xF96B, 'M', u'參'), - (0xF96C, 'M', u'塞'), - (0xF96D, 'M', u'省'), - (0xF96E, 'M', u'葉'), - (0xF96F, 'M', u'說'), - (0xF970, 'M', u'殺'), - (0xF971, 'M', u'辰'), - (0xF972, 'M', u'沈'), - (0xF973, 'M', u'拾'), - (0xF974, 'M', u'若'), - (0xF975, 'M', u'掠'), - (0xF976, 'M', u'略'), - (0xF977, 'M', u'亮'), - (0xF978, 'M', u'兩'), - (0xF979, 'M', u'凉'), - ] - -def _seg_40(): - return [ - (0xF97A, 'M', u'梁'), - (0xF97B, 'M', u'糧'), - (0xF97C, 'M', u'良'), - (0xF97D, 'M', u'諒'), - (0xF97E, 'M', u'量'), - (0xF97F, 'M', u'勵'), - (0xF980, 'M', u'呂'), - (0xF981, 'M', u'女'), - (0xF982, 'M', u'廬'), - (0xF983, 'M', u'旅'), - (0xF984, 'M', u'濾'), - (0xF985, 'M', u'礪'), - (0xF986, 'M', u'閭'), - (0xF987, 'M', u'驪'), - (0xF988, 'M', u'麗'), - (0xF989, 'M', u'黎'), - (0xF98A, 'M', u'力'), - (0xF98B, 'M', u'曆'), - (0xF98C, 'M', u'歷'), - (0xF98D, 'M', u'轢'), - (0xF98E, 'M', u'年'), - (0xF98F, 'M', u'憐'), - (0xF990, 'M', u'戀'), - (0xF991, 'M', u'撚'), - (0xF992, 'M', u'漣'), - (0xF993, 'M', u'煉'), - (0xF994, 'M', u'璉'), - (0xF995, 'M', u'秊'), - (0xF996, 'M', u'練'), - (0xF997, 'M', u'聯'), - (0xF998, 'M', u'輦'), - (0xF999, 'M', u'蓮'), - (0xF99A, 'M', u'連'), - (0xF99B, 'M', u'鍊'), - (0xF99C, 'M', u'列'), - (0xF99D, 'M', u'劣'), - (0xF99E, 'M', u'咽'), - (0xF99F, 'M', u'烈'), - (0xF9A0, 'M', u'裂'), - (0xF9A1, 'M', u'說'), - (0xF9A2, 'M', u'廉'), - (0xF9A3, 'M', u'念'), - (0xF9A4, 'M', u'捻'), - (0xF9A5, 'M', u'殮'), - (0xF9A6, 'M', u'簾'), - (0xF9A7, 'M', u'獵'), - (0xF9A8, 'M', u'令'), - (0xF9A9, 'M', u'囹'), - (0xF9AA, 'M', u'寧'), - (0xF9AB, 'M', u'嶺'), - (0xF9AC, 'M', u'怜'), - (0xF9AD, 'M', u'玲'), - (0xF9AE, 'M', u'瑩'), - (0xF9AF, 'M', u'羚'), - (0xF9B0, 'M', u'聆'), - (0xF9B1, 'M', u'鈴'), - (0xF9B2, 'M', u'零'), - (0xF9B3, 'M', u'靈'), - (0xF9B4, 'M', u'領'), - (0xF9B5, 'M', u'例'), - (0xF9B6, 'M', u'禮'), - (0xF9B7, 'M', u'醴'), - (0xF9B8, 'M', u'隸'), - (0xF9B9, 'M', u'惡'), - (0xF9BA, 'M', u'了'), - (0xF9BB, 'M', u'僚'), - (0xF9BC, 'M', u'寮'), - (0xF9BD, 'M', u'尿'), - (0xF9BE, 'M', u'料'), - (0xF9BF, 'M', u'樂'), - (0xF9C0, 'M', u'燎'), - (0xF9C1, 'M', u'療'), - (0xF9C2, 'M', u'蓼'), - (0xF9C3, 'M', u'遼'), - (0xF9C4, 'M', u'龍'), - (0xF9C5, 'M', u'暈'), - (0xF9C6, 'M', u'阮'), - (0xF9C7, 'M', u'劉'), - (0xF9C8, 'M', u'杻'), - (0xF9C9, 'M', u'柳'), - (0xF9CA, 'M', u'流'), - (0xF9CB, 'M', u'溜'), - (0xF9CC, 'M', u'琉'), - (0xF9CD, 'M', u'留'), - (0xF9CE, 'M', u'硫'), - (0xF9CF, 'M', u'紐'), - (0xF9D0, 'M', u'類'), - (0xF9D1, 'M', u'六'), - (0xF9D2, 'M', u'戮'), - (0xF9D3, 'M', u'陸'), - (0xF9D4, 'M', u'倫'), - (0xF9D5, 'M', u'崙'), - (0xF9D6, 'M', u'淪'), - (0xF9D7, 'M', u'輪'), - (0xF9D8, 'M', u'律'), - (0xF9D9, 'M', u'慄'), - (0xF9DA, 'M', u'栗'), - (0xF9DB, 'M', u'率'), - (0xF9DC, 'M', u'隆'), - (0xF9DD, 'M', u'利'), - ] - -def _seg_41(): - return [ - (0xF9DE, 'M', u'吏'), - (0xF9DF, 'M', u'履'), - (0xF9E0, 'M', u'易'), - (0xF9E1, 'M', u'李'), - (0xF9E2, 'M', u'梨'), - 
(0xF9E3, 'M', u'泥'), - (0xF9E4, 'M', u'理'), - (0xF9E5, 'M', u'痢'), - (0xF9E6, 'M', u'罹'), - (0xF9E7, 'M', u'裏'), - (0xF9E8, 'M', u'裡'), - (0xF9E9, 'M', u'里'), - (0xF9EA, 'M', u'離'), - (0xF9EB, 'M', u'匿'), - (0xF9EC, 'M', u'溺'), - (0xF9ED, 'M', u'吝'), - (0xF9EE, 'M', u'燐'), - (0xF9EF, 'M', u'璘'), - (0xF9F0, 'M', u'藺'), - (0xF9F1, 'M', u'隣'), - (0xF9F2, 'M', u'鱗'), - (0xF9F3, 'M', u'麟'), - (0xF9F4, 'M', u'林'), - (0xF9F5, 'M', u'淋'), - (0xF9F6, 'M', u'臨'), - (0xF9F7, 'M', u'立'), - (0xF9F8, 'M', u'笠'), - (0xF9F9, 'M', u'粒'), - (0xF9FA, 'M', u'狀'), - (0xF9FB, 'M', u'炙'), - (0xF9FC, 'M', u'識'), - (0xF9FD, 'M', u'什'), - (0xF9FE, 'M', u'茶'), - (0xF9FF, 'M', u'刺'), - (0xFA00, 'M', u'切'), - (0xFA01, 'M', u'度'), - (0xFA02, 'M', u'拓'), - (0xFA03, 'M', u'糖'), - (0xFA04, 'M', u'宅'), - (0xFA05, 'M', u'洞'), - (0xFA06, 'M', u'暴'), - (0xFA07, 'M', u'輻'), - (0xFA08, 'M', u'行'), - (0xFA09, 'M', u'降'), - (0xFA0A, 'M', u'見'), - (0xFA0B, 'M', u'廓'), - (0xFA0C, 'M', u'兀'), - (0xFA0D, 'M', u'嗀'), - (0xFA0E, 'V'), - (0xFA10, 'M', u'塚'), - (0xFA11, 'V'), - (0xFA12, 'M', u'晴'), - (0xFA13, 'V'), - (0xFA15, 'M', u'凞'), - (0xFA16, 'M', u'猪'), - (0xFA17, 'M', u'益'), - (0xFA18, 'M', u'礼'), - (0xFA19, 'M', u'神'), - (0xFA1A, 'M', u'祥'), - (0xFA1B, 'M', u'福'), - (0xFA1C, 'M', u'靖'), - (0xFA1D, 'M', u'精'), - (0xFA1E, 'M', u'羽'), - (0xFA1F, 'V'), - (0xFA20, 'M', u'蘒'), - (0xFA21, 'V'), - (0xFA22, 'M', u'諸'), - (0xFA23, 'V'), - (0xFA25, 'M', u'逸'), - (0xFA26, 'M', u'都'), - (0xFA27, 'V'), - (0xFA2A, 'M', u'飯'), - (0xFA2B, 'M', u'飼'), - (0xFA2C, 'M', u'館'), - (0xFA2D, 'M', u'鶴'), - (0xFA2E, 'M', u'郞'), - (0xFA2F, 'M', u'隷'), - (0xFA30, 'M', u'侮'), - (0xFA31, 'M', u'僧'), - (0xFA32, 'M', u'免'), - (0xFA33, 'M', u'勉'), - (0xFA34, 'M', u'勤'), - (0xFA35, 'M', u'卑'), - (0xFA36, 'M', u'喝'), - (0xFA37, 'M', u'嘆'), - (0xFA38, 'M', u'器'), - (0xFA39, 'M', u'塀'), - (0xFA3A, 'M', u'墨'), - (0xFA3B, 'M', u'層'), - (0xFA3C, 'M', u'屮'), - (0xFA3D, 'M', u'悔'), - (0xFA3E, 'M', u'慨'), - (0xFA3F, 'M', u'憎'), - (0xFA40, 'M', u'懲'), - (0xFA41, 'M', u'敏'), - (0xFA42, 'M', u'既'), - (0xFA43, 'M', u'暑'), - (0xFA44, 'M', u'梅'), - (0xFA45, 'M', u'海'), - (0xFA46, 'M', u'渚'), - ] - -def _seg_42(): - return [ - (0xFA47, 'M', u'漢'), - (0xFA48, 'M', u'煮'), - (0xFA49, 'M', u'爫'), - (0xFA4A, 'M', u'琢'), - (0xFA4B, 'M', u'碑'), - (0xFA4C, 'M', u'社'), - (0xFA4D, 'M', u'祉'), - (0xFA4E, 'M', u'祈'), - (0xFA4F, 'M', u'祐'), - (0xFA50, 'M', u'祖'), - (0xFA51, 'M', u'祝'), - (0xFA52, 'M', u'禍'), - (0xFA53, 'M', u'禎'), - (0xFA54, 'M', u'穀'), - (0xFA55, 'M', u'突'), - (0xFA56, 'M', u'節'), - (0xFA57, 'M', u'練'), - (0xFA58, 'M', u'縉'), - (0xFA59, 'M', u'繁'), - (0xFA5A, 'M', u'署'), - (0xFA5B, 'M', u'者'), - (0xFA5C, 'M', u'臭'), - (0xFA5D, 'M', u'艹'), - (0xFA5F, 'M', u'著'), - (0xFA60, 'M', u'褐'), - (0xFA61, 'M', u'視'), - (0xFA62, 'M', u'謁'), - (0xFA63, 'M', u'謹'), - (0xFA64, 'M', u'賓'), - (0xFA65, 'M', u'贈'), - (0xFA66, 'M', u'辶'), - (0xFA67, 'M', u'逸'), - (0xFA68, 'M', u'難'), - (0xFA69, 'M', u'響'), - (0xFA6A, 'M', u'頻'), - (0xFA6B, 'M', u'恵'), - (0xFA6C, 'M', u'𤋮'), - (0xFA6D, 'M', u'舘'), - (0xFA6E, 'X'), - (0xFA70, 'M', u'並'), - (0xFA71, 'M', u'况'), - (0xFA72, 'M', u'全'), - (0xFA73, 'M', u'侀'), - (0xFA74, 'M', u'充'), - (0xFA75, 'M', u'冀'), - (0xFA76, 'M', u'勇'), - (0xFA77, 'M', u'勺'), - (0xFA78, 'M', u'喝'), - (0xFA79, 'M', u'啕'), - (0xFA7A, 'M', u'喙'), - (0xFA7B, 'M', u'嗢'), - (0xFA7C, 'M', u'塚'), - (0xFA7D, 'M', u'墳'), - (0xFA7E, 'M', u'奄'), - (0xFA7F, 'M', u'奔'), - (0xFA80, 'M', u'婢'), - (0xFA81, 'M', u'嬨'), - (0xFA82, 'M', u'廒'), - (0xFA83, 'M', u'廙'), - (0xFA84, 'M', u'彩'), - 
(0xFA85, 'M', u'徭'), - (0xFA86, 'M', u'惘'), - (0xFA87, 'M', u'慎'), - (0xFA88, 'M', u'愈'), - (0xFA89, 'M', u'憎'), - (0xFA8A, 'M', u'慠'), - (0xFA8B, 'M', u'懲'), - (0xFA8C, 'M', u'戴'), - (0xFA8D, 'M', u'揄'), - (0xFA8E, 'M', u'搜'), - (0xFA8F, 'M', u'摒'), - (0xFA90, 'M', u'敖'), - (0xFA91, 'M', u'晴'), - (0xFA92, 'M', u'朗'), - (0xFA93, 'M', u'望'), - (0xFA94, 'M', u'杖'), - (0xFA95, 'M', u'歹'), - (0xFA96, 'M', u'殺'), - (0xFA97, 'M', u'流'), - (0xFA98, 'M', u'滛'), - (0xFA99, 'M', u'滋'), - (0xFA9A, 'M', u'漢'), - (0xFA9B, 'M', u'瀞'), - (0xFA9C, 'M', u'煮'), - (0xFA9D, 'M', u'瞧'), - (0xFA9E, 'M', u'爵'), - (0xFA9F, 'M', u'犯'), - (0xFAA0, 'M', u'猪'), - (0xFAA1, 'M', u'瑱'), - (0xFAA2, 'M', u'甆'), - (0xFAA3, 'M', u'画'), - (0xFAA4, 'M', u'瘝'), - (0xFAA5, 'M', u'瘟'), - (0xFAA6, 'M', u'益'), - (0xFAA7, 'M', u'盛'), - (0xFAA8, 'M', u'直'), - (0xFAA9, 'M', u'睊'), - (0xFAAA, 'M', u'着'), - (0xFAAB, 'M', u'磌'), - (0xFAAC, 'M', u'窱'), - ] - -def _seg_43(): - return [ - (0xFAAD, 'M', u'節'), - (0xFAAE, 'M', u'类'), - (0xFAAF, 'M', u'絛'), - (0xFAB0, 'M', u'練'), - (0xFAB1, 'M', u'缾'), - (0xFAB2, 'M', u'者'), - (0xFAB3, 'M', u'荒'), - (0xFAB4, 'M', u'華'), - (0xFAB5, 'M', u'蝹'), - (0xFAB6, 'M', u'襁'), - (0xFAB7, 'M', u'覆'), - (0xFAB8, 'M', u'視'), - (0xFAB9, 'M', u'調'), - (0xFABA, 'M', u'諸'), - (0xFABB, 'M', u'請'), - (0xFABC, 'M', u'謁'), - (0xFABD, 'M', u'諾'), - (0xFABE, 'M', u'諭'), - (0xFABF, 'M', u'謹'), - (0xFAC0, 'M', u'變'), - (0xFAC1, 'M', u'贈'), - (0xFAC2, 'M', u'輸'), - (0xFAC3, 'M', u'遲'), - (0xFAC4, 'M', u'醙'), - (0xFAC5, 'M', u'鉶'), - (0xFAC6, 'M', u'陼'), - (0xFAC7, 'M', u'難'), - (0xFAC8, 'M', u'靖'), - (0xFAC9, 'M', u'韛'), - (0xFACA, 'M', u'響'), - (0xFACB, 'M', u'頋'), - (0xFACC, 'M', u'頻'), - (0xFACD, 'M', u'鬒'), - (0xFACE, 'M', u'龜'), - (0xFACF, 'M', u'𢡊'), - (0xFAD0, 'M', u'𢡄'), - (0xFAD1, 'M', u'𣏕'), - (0xFAD2, 'M', u'㮝'), - (0xFAD3, 'M', u'䀘'), - (0xFAD4, 'M', u'䀹'), - (0xFAD5, 'M', u'𥉉'), - (0xFAD6, 'M', u'𥳐'), - (0xFAD7, 'M', u'𧻓'), - (0xFAD8, 'M', u'齃'), - (0xFAD9, 'M', u'龎'), - (0xFADA, 'X'), - (0xFB00, 'M', u'ff'), - (0xFB01, 'M', u'fi'), - (0xFB02, 'M', u'fl'), - (0xFB03, 'M', u'ffi'), - (0xFB04, 'M', u'ffl'), - (0xFB05, 'M', u'st'), - (0xFB07, 'X'), - (0xFB13, 'M', u'մն'), - (0xFB14, 'M', u'մե'), - (0xFB15, 'M', u'մի'), - (0xFB16, 'M', u'վն'), - (0xFB17, 'M', u'մխ'), - (0xFB18, 'X'), - (0xFB1D, 'M', u'יִ'), - (0xFB1E, 'V'), - (0xFB1F, 'M', u'ײַ'), - (0xFB20, 'M', u'ע'), - (0xFB21, 'M', u'א'), - (0xFB22, 'M', u'ד'), - (0xFB23, 'M', u'ה'), - (0xFB24, 'M', u'כ'), - (0xFB25, 'M', u'ל'), - (0xFB26, 'M', u'ם'), - (0xFB27, 'M', u'ר'), - (0xFB28, 'M', u'ת'), - (0xFB29, '3', u'+'), - (0xFB2A, 'M', u'שׁ'), - (0xFB2B, 'M', u'שׂ'), - (0xFB2C, 'M', u'שּׁ'), - (0xFB2D, 'M', u'שּׂ'), - (0xFB2E, 'M', u'אַ'), - (0xFB2F, 'M', u'אָ'), - (0xFB30, 'M', u'אּ'), - (0xFB31, 'M', u'בּ'), - (0xFB32, 'M', u'גּ'), - (0xFB33, 'M', u'דּ'), - (0xFB34, 'M', u'הּ'), - (0xFB35, 'M', u'וּ'), - (0xFB36, 'M', u'זּ'), - (0xFB37, 'X'), - (0xFB38, 'M', u'טּ'), - (0xFB39, 'M', u'יּ'), - (0xFB3A, 'M', u'ךּ'), - (0xFB3B, 'M', u'כּ'), - (0xFB3C, 'M', u'לּ'), - (0xFB3D, 'X'), - (0xFB3E, 'M', u'מּ'), - (0xFB3F, 'X'), - (0xFB40, 'M', u'נּ'), - (0xFB41, 'M', u'סּ'), - (0xFB42, 'X'), - (0xFB43, 'M', u'ףּ'), - (0xFB44, 'M', u'פּ'), - (0xFB45, 'X'), - ] - -def _seg_44(): - return [ - (0xFB46, 'M', u'צּ'), - (0xFB47, 'M', u'קּ'), - (0xFB48, 'M', u'רּ'), - (0xFB49, 'M', u'שּ'), - (0xFB4A, 'M', u'תּ'), - (0xFB4B, 'M', u'וֹ'), - (0xFB4C, 'M', u'בֿ'), - (0xFB4D, 'M', u'כֿ'), - (0xFB4E, 'M', u'פֿ'), - (0xFB4F, 'M', u'אל'), - (0xFB50, 'M', u'ٱ'), - (0xFB52, 'M', 
u'ٻ'), - (0xFB56, 'M', u'پ'), - (0xFB5A, 'M', u'ڀ'), - (0xFB5E, 'M', u'ٺ'), - (0xFB62, 'M', u'ٿ'), - (0xFB66, 'M', u'ٹ'), - (0xFB6A, 'M', u'ڤ'), - (0xFB6E, 'M', u'ڦ'), - (0xFB72, 'M', u'ڄ'), - (0xFB76, 'M', u'ڃ'), - (0xFB7A, 'M', u'چ'), - (0xFB7E, 'M', u'ڇ'), - (0xFB82, 'M', u'ڍ'), - (0xFB84, 'M', u'ڌ'), - (0xFB86, 'M', u'ڎ'), - (0xFB88, 'M', u'ڈ'), - (0xFB8A, 'M', u'ژ'), - (0xFB8C, 'M', u'ڑ'), - (0xFB8E, 'M', u'ک'), - (0xFB92, 'M', u'گ'), - (0xFB96, 'M', u'ڳ'), - (0xFB9A, 'M', u'ڱ'), - (0xFB9E, 'M', u'ں'), - (0xFBA0, 'M', u'ڻ'), - (0xFBA4, 'M', u'ۀ'), - (0xFBA6, 'M', u'ہ'), - (0xFBAA, 'M', u'ھ'), - (0xFBAE, 'M', u'ے'), - (0xFBB0, 'M', u'ۓ'), - (0xFBB2, 'V'), - (0xFBC2, 'X'), - (0xFBD3, 'M', u'ڭ'), - (0xFBD7, 'M', u'ۇ'), - (0xFBD9, 'M', u'ۆ'), - (0xFBDB, 'M', u'ۈ'), - (0xFBDD, 'M', u'ۇٴ'), - (0xFBDE, 'M', u'ۋ'), - (0xFBE0, 'M', u'ۅ'), - (0xFBE2, 'M', u'ۉ'), - (0xFBE4, 'M', u'ې'), - (0xFBE8, 'M', u'ى'), - (0xFBEA, 'M', u'ئا'), - (0xFBEC, 'M', u'ئە'), - (0xFBEE, 'M', u'ئو'), - (0xFBF0, 'M', u'ئۇ'), - (0xFBF2, 'M', u'ئۆ'), - (0xFBF4, 'M', u'ئۈ'), - (0xFBF6, 'M', u'ئې'), - (0xFBF9, 'M', u'ئى'), - (0xFBFC, 'M', u'ی'), - (0xFC00, 'M', u'ئج'), - (0xFC01, 'M', u'ئح'), - (0xFC02, 'M', u'ئم'), - (0xFC03, 'M', u'ئى'), - (0xFC04, 'M', u'ئي'), - (0xFC05, 'M', u'بج'), - (0xFC06, 'M', u'بح'), - (0xFC07, 'M', u'بخ'), - (0xFC08, 'M', u'بم'), - (0xFC09, 'M', u'بى'), - (0xFC0A, 'M', u'بي'), - (0xFC0B, 'M', u'تج'), - (0xFC0C, 'M', u'تح'), - (0xFC0D, 'M', u'تخ'), - (0xFC0E, 'M', u'تم'), - (0xFC0F, 'M', u'تى'), - (0xFC10, 'M', u'تي'), - (0xFC11, 'M', u'ثج'), - (0xFC12, 'M', u'ثم'), - (0xFC13, 'M', u'ثى'), - (0xFC14, 'M', u'ثي'), - (0xFC15, 'M', u'جح'), - (0xFC16, 'M', u'جم'), - (0xFC17, 'M', u'حج'), - (0xFC18, 'M', u'حم'), - (0xFC19, 'M', u'خج'), - (0xFC1A, 'M', u'خح'), - (0xFC1B, 'M', u'خم'), - (0xFC1C, 'M', u'سج'), - (0xFC1D, 'M', u'سح'), - (0xFC1E, 'M', u'سخ'), - (0xFC1F, 'M', u'سم'), - (0xFC20, 'M', u'صح'), - (0xFC21, 'M', u'صم'), - (0xFC22, 'M', u'ضج'), - (0xFC23, 'M', u'ضح'), - (0xFC24, 'M', u'ضخ'), - (0xFC25, 'M', u'ضم'), - (0xFC26, 'M', u'طح'), - ] - -def _seg_45(): - return [ - (0xFC27, 'M', u'طم'), - (0xFC28, 'M', u'ظم'), - (0xFC29, 'M', u'عج'), - (0xFC2A, 'M', u'عم'), - (0xFC2B, 'M', u'غج'), - (0xFC2C, 'M', u'غم'), - (0xFC2D, 'M', u'فج'), - (0xFC2E, 'M', u'فح'), - (0xFC2F, 'M', u'فخ'), - (0xFC30, 'M', u'فم'), - (0xFC31, 'M', u'فى'), - (0xFC32, 'M', u'في'), - (0xFC33, 'M', u'قح'), - (0xFC34, 'M', u'قم'), - (0xFC35, 'M', u'قى'), - (0xFC36, 'M', u'قي'), - (0xFC37, 'M', u'كا'), - (0xFC38, 'M', u'كج'), - (0xFC39, 'M', u'كح'), - (0xFC3A, 'M', u'كخ'), - (0xFC3B, 'M', u'كل'), - (0xFC3C, 'M', u'كم'), - (0xFC3D, 'M', u'كى'), - (0xFC3E, 'M', u'كي'), - (0xFC3F, 'M', u'لج'), - (0xFC40, 'M', u'لح'), - (0xFC41, 'M', u'لخ'), - (0xFC42, 'M', u'لم'), - (0xFC43, 'M', u'لى'), - (0xFC44, 'M', u'لي'), - (0xFC45, 'M', u'مج'), - (0xFC46, 'M', u'مح'), - (0xFC47, 'M', u'مخ'), - (0xFC48, 'M', u'مم'), - (0xFC49, 'M', u'مى'), - (0xFC4A, 'M', u'مي'), - (0xFC4B, 'M', u'نج'), - (0xFC4C, 'M', u'نح'), - (0xFC4D, 'M', u'نخ'), - (0xFC4E, 'M', u'نم'), - (0xFC4F, 'M', u'نى'), - (0xFC50, 'M', u'ني'), - (0xFC51, 'M', u'هج'), - (0xFC52, 'M', u'هم'), - (0xFC53, 'M', u'هى'), - (0xFC54, 'M', u'هي'), - (0xFC55, 'M', u'يج'), - (0xFC56, 'M', u'يح'), - (0xFC57, 'M', u'يخ'), - (0xFC58, 'M', u'يم'), - (0xFC59, 'M', u'يى'), - (0xFC5A, 'M', u'يي'), - (0xFC5B, 'M', u'ذٰ'), - (0xFC5C, 'M', u'رٰ'), - (0xFC5D, 'M', u'ىٰ'), - (0xFC5E, '3', u' ٌّ'), - (0xFC5F, '3', u' ٍّ'), - (0xFC60, '3', u' َّ'), - (0xFC61, '3', u' ُّ'), - (0xFC62, '3', u' ِّ'), - 
(0xFC63, '3', u' ّٰ'), - (0xFC64, 'M', u'ئر'), - (0xFC65, 'M', u'ئز'), - (0xFC66, 'M', u'ئم'), - (0xFC67, 'M', u'ئن'), - (0xFC68, 'M', u'ئى'), - (0xFC69, 'M', u'ئي'), - (0xFC6A, 'M', u'بر'), - (0xFC6B, 'M', u'بز'), - (0xFC6C, 'M', u'بم'), - (0xFC6D, 'M', u'بن'), - (0xFC6E, 'M', u'بى'), - (0xFC6F, 'M', u'بي'), - (0xFC70, 'M', u'تر'), - (0xFC71, 'M', u'تز'), - (0xFC72, 'M', u'تم'), - (0xFC73, 'M', u'تن'), - (0xFC74, 'M', u'تى'), - (0xFC75, 'M', u'تي'), - (0xFC76, 'M', u'ثر'), - (0xFC77, 'M', u'ثز'), - (0xFC78, 'M', u'ثم'), - (0xFC79, 'M', u'ثن'), - (0xFC7A, 'M', u'ثى'), - (0xFC7B, 'M', u'ثي'), - (0xFC7C, 'M', u'فى'), - (0xFC7D, 'M', u'في'), - (0xFC7E, 'M', u'قى'), - (0xFC7F, 'M', u'قي'), - (0xFC80, 'M', u'كا'), - (0xFC81, 'M', u'كل'), - (0xFC82, 'M', u'كم'), - (0xFC83, 'M', u'كى'), - (0xFC84, 'M', u'كي'), - (0xFC85, 'M', u'لم'), - (0xFC86, 'M', u'لى'), - (0xFC87, 'M', u'لي'), - (0xFC88, 'M', u'ما'), - (0xFC89, 'M', u'مم'), - (0xFC8A, 'M', u'نر'), - ] - -def _seg_46(): - return [ - (0xFC8B, 'M', u'نز'), - (0xFC8C, 'M', u'نم'), - (0xFC8D, 'M', u'نن'), - (0xFC8E, 'M', u'نى'), - (0xFC8F, 'M', u'ني'), - (0xFC90, 'M', u'ىٰ'), - (0xFC91, 'M', u'ير'), - (0xFC92, 'M', u'يز'), - (0xFC93, 'M', u'يم'), - (0xFC94, 'M', u'ين'), - (0xFC95, 'M', u'يى'), - (0xFC96, 'M', u'يي'), - (0xFC97, 'M', u'ئج'), - (0xFC98, 'M', u'ئح'), - (0xFC99, 'M', u'ئخ'), - (0xFC9A, 'M', u'ئم'), - (0xFC9B, 'M', u'ئه'), - (0xFC9C, 'M', u'بج'), - (0xFC9D, 'M', u'بح'), - (0xFC9E, 'M', u'بخ'), - (0xFC9F, 'M', u'بم'), - (0xFCA0, 'M', u'به'), - (0xFCA1, 'M', u'تج'), - (0xFCA2, 'M', u'تح'), - (0xFCA3, 'M', u'تخ'), - (0xFCA4, 'M', u'تم'), - (0xFCA5, 'M', u'ته'), - (0xFCA6, 'M', u'ثم'), - (0xFCA7, 'M', u'جح'), - (0xFCA8, 'M', u'جم'), - (0xFCA9, 'M', u'حج'), - (0xFCAA, 'M', u'حم'), - (0xFCAB, 'M', u'خج'), - (0xFCAC, 'M', u'خم'), - (0xFCAD, 'M', u'سج'), - (0xFCAE, 'M', u'سح'), - (0xFCAF, 'M', u'سخ'), - (0xFCB0, 'M', u'سم'), - (0xFCB1, 'M', u'صح'), - (0xFCB2, 'M', u'صخ'), - (0xFCB3, 'M', u'صم'), - (0xFCB4, 'M', u'ضج'), - (0xFCB5, 'M', u'ضح'), - (0xFCB6, 'M', u'ضخ'), - (0xFCB7, 'M', u'ضم'), - (0xFCB8, 'M', u'طح'), - (0xFCB9, 'M', u'ظم'), - (0xFCBA, 'M', u'عج'), - (0xFCBB, 'M', u'عم'), - (0xFCBC, 'M', u'غج'), - (0xFCBD, 'M', u'غم'), - (0xFCBE, 'M', u'فج'), - (0xFCBF, 'M', u'فح'), - (0xFCC0, 'M', u'فخ'), - (0xFCC1, 'M', u'فم'), - (0xFCC2, 'M', u'قح'), - (0xFCC3, 'M', u'قم'), - (0xFCC4, 'M', u'كج'), - (0xFCC5, 'M', u'كح'), - (0xFCC6, 'M', u'كخ'), - (0xFCC7, 'M', u'كل'), - (0xFCC8, 'M', u'كم'), - (0xFCC9, 'M', u'لج'), - (0xFCCA, 'M', u'لح'), - (0xFCCB, 'M', u'لخ'), - (0xFCCC, 'M', u'لم'), - (0xFCCD, 'M', u'له'), - (0xFCCE, 'M', u'مج'), - (0xFCCF, 'M', u'مح'), - (0xFCD0, 'M', u'مخ'), - (0xFCD1, 'M', u'مم'), - (0xFCD2, 'M', u'نج'), - (0xFCD3, 'M', u'نح'), - (0xFCD4, 'M', u'نخ'), - (0xFCD5, 'M', u'نم'), - (0xFCD6, 'M', u'نه'), - (0xFCD7, 'M', u'هج'), - (0xFCD8, 'M', u'هم'), - (0xFCD9, 'M', u'هٰ'), - (0xFCDA, 'M', u'يج'), - (0xFCDB, 'M', u'يح'), - (0xFCDC, 'M', u'يخ'), - (0xFCDD, 'M', u'يم'), - (0xFCDE, 'M', u'يه'), - (0xFCDF, 'M', u'ئم'), - (0xFCE0, 'M', u'ئه'), - (0xFCE1, 'M', u'بم'), - (0xFCE2, 'M', u'به'), - (0xFCE3, 'M', u'تم'), - (0xFCE4, 'M', u'ته'), - (0xFCE5, 'M', u'ثم'), - (0xFCE6, 'M', u'ثه'), - (0xFCE7, 'M', u'سم'), - (0xFCE8, 'M', u'سه'), - (0xFCE9, 'M', u'شم'), - (0xFCEA, 'M', u'شه'), - (0xFCEB, 'M', u'كل'), - (0xFCEC, 'M', u'كم'), - (0xFCED, 'M', u'لم'), - (0xFCEE, 'M', u'نم'), - ] - -def _seg_47(): - return [ - (0xFCEF, 'M', u'نه'), - (0xFCF0, 'M', u'يم'), - (0xFCF1, 'M', u'يه'), - (0xFCF2, 'M', u'ـَّ'), - (0xFCF3, 'M', u'ـُّ'), - 
(0xFCF4, 'M', u'ـِّ'), - (0xFCF5, 'M', u'طى'), - (0xFCF6, 'M', u'طي'), - (0xFCF7, 'M', u'عى'), - (0xFCF8, 'M', u'عي'), - (0xFCF9, 'M', u'غى'), - (0xFCFA, 'M', u'غي'), - (0xFCFB, 'M', u'سى'), - (0xFCFC, 'M', u'سي'), - (0xFCFD, 'M', u'شى'), - (0xFCFE, 'M', u'شي'), - (0xFCFF, 'M', u'حى'), - (0xFD00, 'M', u'حي'), - (0xFD01, 'M', u'جى'), - (0xFD02, 'M', u'جي'), - (0xFD03, 'M', u'خى'), - (0xFD04, 'M', u'خي'), - (0xFD05, 'M', u'صى'), - (0xFD06, 'M', u'صي'), - (0xFD07, 'M', u'ضى'), - (0xFD08, 'M', u'ضي'), - (0xFD09, 'M', u'شج'), - (0xFD0A, 'M', u'شح'), - (0xFD0B, 'M', u'شخ'), - (0xFD0C, 'M', u'شم'), - (0xFD0D, 'M', u'شر'), - (0xFD0E, 'M', u'سر'), - (0xFD0F, 'M', u'صر'), - (0xFD10, 'M', u'ضر'), - (0xFD11, 'M', u'طى'), - (0xFD12, 'M', u'طي'), - (0xFD13, 'M', u'عى'), - (0xFD14, 'M', u'عي'), - (0xFD15, 'M', u'غى'), - (0xFD16, 'M', u'غي'), - (0xFD17, 'M', u'سى'), - (0xFD18, 'M', u'سي'), - (0xFD19, 'M', u'شى'), - (0xFD1A, 'M', u'شي'), - (0xFD1B, 'M', u'حى'), - (0xFD1C, 'M', u'حي'), - (0xFD1D, 'M', u'جى'), - (0xFD1E, 'M', u'جي'), - (0xFD1F, 'M', u'خى'), - (0xFD20, 'M', u'خي'), - (0xFD21, 'M', u'صى'), - (0xFD22, 'M', u'صي'), - (0xFD23, 'M', u'ضى'), - (0xFD24, 'M', u'ضي'), - (0xFD25, 'M', u'شج'), - (0xFD26, 'M', u'شح'), - (0xFD27, 'M', u'شخ'), - (0xFD28, 'M', u'شم'), - (0xFD29, 'M', u'شر'), - (0xFD2A, 'M', u'سر'), - (0xFD2B, 'M', u'صر'), - (0xFD2C, 'M', u'ضر'), - (0xFD2D, 'M', u'شج'), - (0xFD2E, 'M', u'شح'), - (0xFD2F, 'M', u'شخ'), - (0xFD30, 'M', u'شم'), - (0xFD31, 'M', u'سه'), - (0xFD32, 'M', u'شه'), - (0xFD33, 'M', u'طم'), - (0xFD34, 'M', u'سج'), - (0xFD35, 'M', u'سح'), - (0xFD36, 'M', u'سخ'), - (0xFD37, 'M', u'شج'), - (0xFD38, 'M', u'شح'), - (0xFD39, 'M', u'شخ'), - (0xFD3A, 'M', u'طم'), - (0xFD3B, 'M', u'ظم'), - (0xFD3C, 'M', u'اً'), - (0xFD3E, 'V'), - (0xFD40, 'X'), - (0xFD50, 'M', u'تجم'), - (0xFD51, 'M', u'تحج'), - (0xFD53, 'M', u'تحم'), - (0xFD54, 'M', u'تخم'), - (0xFD55, 'M', u'تمج'), - (0xFD56, 'M', u'تمح'), - (0xFD57, 'M', u'تمخ'), - (0xFD58, 'M', u'جمح'), - (0xFD5A, 'M', u'حمي'), - (0xFD5B, 'M', u'حمى'), - (0xFD5C, 'M', u'سحج'), - (0xFD5D, 'M', u'سجح'), - (0xFD5E, 'M', u'سجى'), - (0xFD5F, 'M', u'سمح'), - (0xFD61, 'M', u'سمج'), - (0xFD62, 'M', u'سمم'), - (0xFD64, 'M', u'صحح'), - (0xFD66, 'M', u'صمم'), - (0xFD67, 'M', u'شحم'), - (0xFD69, 'M', u'شجي'), - ] - -def _seg_48(): - return [ - (0xFD6A, 'M', u'شمخ'), - (0xFD6C, 'M', u'شمم'), - (0xFD6E, 'M', u'ضحى'), - (0xFD6F, 'M', u'ضخم'), - (0xFD71, 'M', u'طمح'), - (0xFD73, 'M', u'طمم'), - (0xFD74, 'M', u'طمي'), - (0xFD75, 'M', u'عجم'), - (0xFD76, 'M', u'عمم'), - (0xFD78, 'M', u'عمى'), - (0xFD79, 'M', u'غمم'), - (0xFD7A, 'M', u'غمي'), - (0xFD7B, 'M', u'غمى'), - (0xFD7C, 'M', u'فخم'), - (0xFD7E, 'M', u'قمح'), - (0xFD7F, 'M', u'قمم'), - (0xFD80, 'M', u'لحم'), - (0xFD81, 'M', u'لحي'), - (0xFD82, 'M', u'لحى'), - (0xFD83, 'M', u'لجج'), - (0xFD85, 'M', u'لخم'), - (0xFD87, 'M', u'لمح'), - (0xFD89, 'M', u'محج'), - (0xFD8A, 'M', u'محم'), - (0xFD8B, 'M', u'محي'), - (0xFD8C, 'M', u'مجح'), - (0xFD8D, 'M', u'مجم'), - (0xFD8E, 'M', u'مخج'), - (0xFD8F, 'M', u'مخم'), - (0xFD90, 'X'), - (0xFD92, 'M', u'مجخ'), - (0xFD93, 'M', u'همج'), - (0xFD94, 'M', u'همم'), - (0xFD95, 'M', u'نحم'), - (0xFD96, 'M', u'نحى'), - (0xFD97, 'M', u'نجم'), - (0xFD99, 'M', u'نجى'), - (0xFD9A, 'M', u'نمي'), - (0xFD9B, 'M', u'نمى'), - (0xFD9C, 'M', u'يمم'), - (0xFD9E, 'M', u'بخي'), - (0xFD9F, 'M', u'تجي'), - (0xFDA0, 'M', u'تجى'), - (0xFDA1, 'M', u'تخي'), - (0xFDA2, 'M', u'تخى'), - (0xFDA3, 'M', u'تمي'), - (0xFDA4, 'M', u'تمى'), - (0xFDA5, 'M', u'جمي'), - (0xFDA6, 'M', u'جحى'), - (0xFDA7, 'M', 
u'جمى'), - (0xFDA8, 'M', u'سخى'), - (0xFDA9, 'M', u'صحي'), - (0xFDAA, 'M', u'شحي'), - (0xFDAB, 'M', u'ضحي'), - (0xFDAC, 'M', u'لجي'), - (0xFDAD, 'M', u'لمي'), - (0xFDAE, 'M', u'يحي'), - (0xFDAF, 'M', u'يجي'), - (0xFDB0, 'M', u'يمي'), - (0xFDB1, 'M', u'ممي'), - (0xFDB2, 'M', u'قمي'), - (0xFDB3, 'M', u'نحي'), - (0xFDB4, 'M', u'قمح'), - (0xFDB5, 'M', u'لحم'), - (0xFDB6, 'M', u'عمي'), - (0xFDB7, 'M', u'كمي'), - (0xFDB8, 'M', u'نجح'), - (0xFDB9, 'M', u'مخي'), - (0xFDBA, 'M', u'لجم'), - (0xFDBB, 'M', u'كمم'), - (0xFDBC, 'M', u'لجم'), - (0xFDBD, 'M', u'نجح'), - (0xFDBE, 'M', u'جحي'), - (0xFDBF, 'M', u'حجي'), - (0xFDC0, 'M', u'مجي'), - (0xFDC1, 'M', u'فمي'), - (0xFDC2, 'M', u'بحي'), - (0xFDC3, 'M', u'كمم'), - (0xFDC4, 'M', u'عجم'), - (0xFDC5, 'M', u'صمم'), - (0xFDC6, 'M', u'سخي'), - (0xFDC7, 'M', u'نجي'), - (0xFDC8, 'X'), - (0xFDF0, 'M', u'صلے'), - (0xFDF1, 'M', u'قلے'), - (0xFDF2, 'M', u'الله'), - (0xFDF3, 'M', u'اكبر'), - (0xFDF4, 'M', u'محمد'), - (0xFDF5, 'M', u'صلعم'), - (0xFDF6, 'M', u'رسول'), - (0xFDF7, 'M', u'عليه'), - (0xFDF8, 'M', u'وسلم'), - (0xFDF9, 'M', u'صلى'), - (0xFDFA, '3', u'صلى الله عليه وسلم'), - (0xFDFB, '3', u'جل جلاله'), - (0xFDFC, 'M', u'ریال'), - (0xFDFD, 'V'), - (0xFDFE, 'X'), - (0xFE00, 'I'), - (0xFE10, '3', u','), - ] - -def _seg_49(): - return [ - (0xFE11, 'M', u'、'), - (0xFE12, 'X'), - (0xFE13, '3', u':'), - (0xFE14, '3', u';'), - (0xFE15, '3', u'!'), - (0xFE16, '3', u'?'), - (0xFE17, 'M', u'〖'), - (0xFE18, 'M', u'〗'), - (0xFE19, 'X'), - (0xFE20, 'V'), - (0xFE30, 'X'), - (0xFE31, 'M', u'—'), - (0xFE32, 'M', u'–'), - (0xFE33, '3', u'_'), - (0xFE35, '3', u'('), - (0xFE36, '3', u')'), - (0xFE37, '3', u'{'), - (0xFE38, '3', u'}'), - (0xFE39, 'M', u'〔'), - (0xFE3A, 'M', u'〕'), - (0xFE3B, 'M', u'【'), - (0xFE3C, 'M', u'】'), - (0xFE3D, 'M', u'《'), - (0xFE3E, 'M', u'》'), - (0xFE3F, 'M', u'〈'), - (0xFE40, 'M', u'〉'), - (0xFE41, 'M', u'「'), - (0xFE42, 'M', u'」'), - (0xFE43, 'M', u'『'), - (0xFE44, 'M', u'』'), - (0xFE45, 'V'), - (0xFE47, '3', u'['), - (0xFE48, '3', u']'), - (0xFE49, '3', u' ̅'), - (0xFE4D, '3', u'_'), - (0xFE50, '3', u','), - (0xFE51, 'M', u'、'), - (0xFE52, 'X'), - (0xFE54, '3', u';'), - (0xFE55, '3', u':'), - (0xFE56, '3', u'?'), - (0xFE57, '3', u'!'), - (0xFE58, 'M', u'—'), - (0xFE59, '3', u'('), - (0xFE5A, '3', u')'), - (0xFE5B, '3', u'{'), - (0xFE5C, '3', u'}'), - (0xFE5D, 'M', u'〔'), - (0xFE5E, 'M', u'〕'), - (0xFE5F, '3', u'#'), - (0xFE60, '3', u'&'), - (0xFE61, '3', u'*'), - (0xFE62, '3', u'+'), - (0xFE63, 'M', u'-'), - (0xFE64, '3', u'<'), - (0xFE65, '3', u'>'), - (0xFE66, '3', u'='), - (0xFE67, 'X'), - (0xFE68, '3', u'\\'), - (0xFE69, '3', u'$'), - (0xFE6A, '3', u'%'), - (0xFE6B, '3', u'@'), - (0xFE6C, 'X'), - (0xFE70, '3', u' ً'), - (0xFE71, 'M', u'ـً'), - (0xFE72, '3', u' ٌ'), - (0xFE73, 'V'), - (0xFE74, '3', u' ٍ'), - (0xFE75, 'X'), - (0xFE76, '3', u' َ'), - (0xFE77, 'M', u'ـَ'), - (0xFE78, '3', u' ُ'), - (0xFE79, 'M', u'ـُ'), - (0xFE7A, '3', u' ِ'), - (0xFE7B, 'M', u'ـِ'), - (0xFE7C, '3', u' ّ'), - (0xFE7D, 'M', u'ـّ'), - (0xFE7E, '3', u' ْ'), - (0xFE7F, 'M', u'ـْ'), - (0xFE80, 'M', u'ء'), - (0xFE81, 'M', u'آ'), - (0xFE83, 'M', u'أ'), - (0xFE85, 'M', u'ؤ'), - (0xFE87, 'M', u'إ'), - (0xFE89, 'M', u'ئ'), - (0xFE8D, 'M', u'ا'), - (0xFE8F, 'M', u'ب'), - (0xFE93, 'M', u'ة'), - (0xFE95, 'M', u'ت'), - (0xFE99, 'M', u'ث'), - (0xFE9D, 'M', u'ج'), - (0xFEA1, 'M', u'ح'), - (0xFEA5, 'M', u'خ'), - (0xFEA9, 'M', u'د'), - (0xFEAB, 'M', u'ذ'), - (0xFEAD, 'M', u'ر'), - (0xFEAF, 'M', u'ز'), - (0xFEB1, 'M', u'س'), - (0xFEB5, 'M', u'ش'), - (0xFEB9, 'M', u'ص'), - ] - -def 
_seg_50(): - return [ - (0xFEBD, 'M', u'ض'), - (0xFEC1, 'M', u'ط'), - (0xFEC5, 'M', u'ظ'), - (0xFEC9, 'M', u'ع'), - (0xFECD, 'M', u'غ'), - (0xFED1, 'M', u'ف'), - (0xFED5, 'M', u'ق'), - (0xFED9, 'M', u'ك'), - (0xFEDD, 'M', u'ل'), - (0xFEE1, 'M', u'م'), - (0xFEE5, 'M', u'ن'), - (0xFEE9, 'M', u'ه'), - (0xFEED, 'M', u'و'), - (0xFEEF, 'M', u'ى'), - (0xFEF1, 'M', u'ي'), - (0xFEF5, 'M', u'لآ'), - (0xFEF7, 'M', u'لأ'), - (0xFEF9, 'M', u'لإ'), - (0xFEFB, 'M', u'لا'), - (0xFEFD, 'X'), - (0xFEFF, 'I'), - (0xFF00, 'X'), - (0xFF01, '3', u'!'), - (0xFF02, '3', u'"'), - (0xFF03, '3', u'#'), - (0xFF04, '3', u'$'), - (0xFF05, '3', u'%'), - (0xFF06, '3', u'&'), - (0xFF07, '3', u'\''), - (0xFF08, '3', u'('), - (0xFF09, '3', u')'), - (0xFF0A, '3', u'*'), - (0xFF0B, '3', u'+'), - (0xFF0C, '3', u','), - (0xFF0D, 'M', u'-'), - (0xFF0E, 'M', u'.'), - (0xFF0F, '3', u'/'), - (0xFF10, 'M', u'0'), - (0xFF11, 'M', u'1'), - (0xFF12, 'M', u'2'), - (0xFF13, 'M', u'3'), - (0xFF14, 'M', u'4'), - (0xFF15, 'M', u'5'), - (0xFF16, 'M', u'6'), - (0xFF17, 'M', u'7'), - (0xFF18, 'M', u'8'), - (0xFF19, 'M', u'9'), - (0xFF1A, '3', u':'), - (0xFF1B, '3', u';'), - (0xFF1C, '3', u'<'), - (0xFF1D, '3', u'='), - (0xFF1E, '3', u'>'), - (0xFF1F, '3', u'?'), - (0xFF20, '3', u'@'), - (0xFF21, 'M', u'a'), - (0xFF22, 'M', u'b'), - (0xFF23, 'M', u'c'), - (0xFF24, 'M', u'd'), - (0xFF25, 'M', u'e'), - (0xFF26, 'M', u'f'), - (0xFF27, 'M', u'g'), - (0xFF28, 'M', u'h'), - (0xFF29, 'M', u'i'), - (0xFF2A, 'M', u'j'), - (0xFF2B, 'M', u'k'), - (0xFF2C, 'M', u'l'), - (0xFF2D, 'M', u'm'), - (0xFF2E, 'M', u'n'), - (0xFF2F, 'M', u'o'), - (0xFF30, 'M', u'p'), - (0xFF31, 'M', u'q'), - (0xFF32, 'M', u'r'), - (0xFF33, 'M', u's'), - (0xFF34, 'M', u't'), - (0xFF35, 'M', u'u'), - (0xFF36, 'M', u'v'), - (0xFF37, 'M', u'w'), - (0xFF38, 'M', u'x'), - (0xFF39, 'M', u'y'), - (0xFF3A, 'M', u'z'), - (0xFF3B, '3', u'['), - (0xFF3C, '3', u'\\'), - (0xFF3D, '3', u']'), - (0xFF3E, '3', u'^'), - (0xFF3F, '3', u'_'), - (0xFF40, '3', u'`'), - (0xFF41, 'M', u'a'), - (0xFF42, 'M', u'b'), - (0xFF43, 'M', u'c'), - (0xFF44, 'M', u'd'), - (0xFF45, 'M', u'e'), - (0xFF46, 'M', u'f'), - (0xFF47, 'M', u'g'), - (0xFF48, 'M', u'h'), - (0xFF49, 'M', u'i'), - (0xFF4A, 'M', u'j'), - (0xFF4B, 'M', u'k'), - (0xFF4C, 'M', u'l'), - (0xFF4D, 'M', u'm'), - (0xFF4E, 'M', u'n'), - ] - -def _seg_51(): - return [ - (0xFF4F, 'M', u'o'), - (0xFF50, 'M', u'p'), - (0xFF51, 'M', u'q'), - (0xFF52, 'M', u'r'), - (0xFF53, 'M', u's'), - (0xFF54, 'M', u't'), - (0xFF55, 'M', u'u'), - (0xFF56, 'M', u'v'), - (0xFF57, 'M', u'w'), - (0xFF58, 'M', u'x'), - (0xFF59, 'M', u'y'), - (0xFF5A, 'M', u'z'), - (0xFF5B, '3', u'{'), - (0xFF5C, '3', u'|'), - (0xFF5D, '3', u'}'), - (0xFF5E, '3', u'~'), - (0xFF5F, 'M', u'⦅'), - (0xFF60, 'M', u'⦆'), - (0xFF61, 'M', u'.'), - (0xFF62, 'M', u'「'), - (0xFF63, 'M', u'」'), - (0xFF64, 'M', u'、'), - (0xFF65, 'M', u'・'), - (0xFF66, 'M', u'ヲ'), - (0xFF67, 'M', u'ァ'), - (0xFF68, 'M', u'ィ'), - (0xFF69, 'M', u'ゥ'), - (0xFF6A, 'M', u'ェ'), - (0xFF6B, 'M', u'ォ'), - (0xFF6C, 'M', u'ャ'), - (0xFF6D, 'M', u'ュ'), - (0xFF6E, 'M', u'ョ'), - (0xFF6F, 'M', u'ッ'), - (0xFF70, 'M', u'ー'), - (0xFF71, 'M', u'ア'), - (0xFF72, 'M', u'イ'), - (0xFF73, 'M', u'ウ'), - (0xFF74, 'M', u'エ'), - (0xFF75, 'M', u'オ'), - (0xFF76, 'M', u'カ'), - (0xFF77, 'M', u'キ'), - (0xFF78, 'M', u'ク'), - (0xFF79, 'M', u'ケ'), - (0xFF7A, 'M', u'コ'), - (0xFF7B, 'M', u'サ'), - (0xFF7C, 'M', u'シ'), - (0xFF7D, 'M', u'ス'), - (0xFF7E, 'M', u'セ'), - (0xFF7F, 'M', u'ソ'), - (0xFF80, 'M', u'タ'), - (0xFF81, 'M', u'チ'), - (0xFF82, 'M', u'ツ'), - (0xFF83, 'M', 
u'テ'), - (0xFF84, 'M', u'ト'), - (0xFF85, 'M', u'ナ'), - (0xFF86, 'M', u'ニ'), - (0xFF87, 'M', u'ヌ'), - (0xFF88, 'M', u'ネ'), - (0xFF89, 'M', u'ノ'), - (0xFF8A, 'M', u'ハ'), - (0xFF8B, 'M', u'ヒ'), - (0xFF8C, 'M', u'フ'), - (0xFF8D, 'M', u'ヘ'), - (0xFF8E, 'M', u'ホ'), - (0xFF8F, 'M', u'マ'), - (0xFF90, 'M', u'ミ'), - (0xFF91, 'M', u'ム'), - (0xFF92, 'M', u'メ'), - (0xFF93, 'M', u'モ'), - (0xFF94, 'M', u'ヤ'), - (0xFF95, 'M', u'ユ'), - (0xFF96, 'M', u'ヨ'), - (0xFF97, 'M', u'ラ'), - (0xFF98, 'M', u'リ'), - (0xFF99, 'M', u'ル'), - (0xFF9A, 'M', u'レ'), - (0xFF9B, 'M', u'ロ'), - (0xFF9C, 'M', u'ワ'), - (0xFF9D, 'M', u'ン'), - (0xFF9E, 'M', u'゙'), - (0xFF9F, 'M', u'゚'), - (0xFFA0, 'X'), - (0xFFA1, 'M', u'ᄀ'), - (0xFFA2, 'M', u'ᄁ'), - (0xFFA3, 'M', u'ᆪ'), - (0xFFA4, 'M', u'ᄂ'), - (0xFFA5, 'M', u'ᆬ'), - (0xFFA6, 'M', u'ᆭ'), - (0xFFA7, 'M', u'ᄃ'), - (0xFFA8, 'M', u'ᄄ'), - (0xFFA9, 'M', u'ᄅ'), - (0xFFAA, 'M', u'ᆰ'), - (0xFFAB, 'M', u'ᆱ'), - (0xFFAC, 'M', u'ᆲ'), - (0xFFAD, 'M', u'ᆳ'), - (0xFFAE, 'M', u'ᆴ'), - (0xFFAF, 'M', u'ᆵ'), - (0xFFB0, 'M', u'ᄚ'), - (0xFFB1, 'M', u'ᄆ'), - (0xFFB2, 'M', u'ᄇ'), - ] - -def _seg_52(): - return [ - (0xFFB3, 'M', u'ᄈ'), - (0xFFB4, 'M', u'ᄡ'), - (0xFFB5, 'M', u'ᄉ'), - (0xFFB6, 'M', u'ᄊ'), - (0xFFB7, 'M', u'ᄋ'), - (0xFFB8, 'M', u'ᄌ'), - (0xFFB9, 'M', u'ᄍ'), - (0xFFBA, 'M', u'ᄎ'), - (0xFFBB, 'M', u'ᄏ'), - (0xFFBC, 'M', u'ᄐ'), - (0xFFBD, 'M', u'ᄑ'), - (0xFFBE, 'M', u'ᄒ'), - (0xFFBF, 'X'), - (0xFFC2, 'M', u'ᅡ'), - (0xFFC3, 'M', u'ᅢ'), - (0xFFC4, 'M', u'ᅣ'), - (0xFFC5, 'M', u'ᅤ'), - (0xFFC6, 'M', u'ᅥ'), - (0xFFC7, 'M', u'ᅦ'), - (0xFFC8, 'X'), - (0xFFCA, 'M', u'ᅧ'), - (0xFFCB, 'M', u'ᅨ'), - (0xFFCC, 'M', u'ᅩ'), - (0xFFCD, 'M', u'ᅪ'), - (0xFFCE, 'M', u'ᅫ'), - (0xFFCF, 'M', u'ᅬ'), - (0xFFD0, 'X'), - (0xFFD2, 'M', u'ᅭ'), - (0xFFD3, 'M', u'ᅮ'), - (0xFFD4, 'M', u'ᅯ'), - (0xFFD5, 'M', u'ᅰ'), - (0xFFD6, 'M', u'ᅱ'), - (0xFFD7, 'M', u'ᅲ'), - (0xFFD8, 'X'), - (0xFFDA, 'M', u'ᅳ'), - (0xFFDB, 'M', u'ᅴ'), - (0xFFDC, 'M', u'ᅵ'), - (0xFFDD, 'X'), - (0xFFE0, 'M', u'¢'), - (0xFFE1, 'M', u'£'), - (0xFFE2, 'M', u'¬'), - (0xFFE3, '3', u' ̄'), - (0xFFE4, 'M', u'¦'), - (0xFFE5, 'M', u'¥'), - (0xFFE6, 'M', u'₩'), - (0xFFE7, 'X'), - (0xFFE8, 'M', u'│'), - (0xFFE9, 'M', u'←'), - (0xFFEA, 'M', u'↑'), - (0xFFEB, 'M', u'→'), - (0xFFEC, 'M', u'↓'), - (0xFFED, 'M', u'■'), - (0xFFEE, 'M', u'○'), - (0xFFEF, 'X'), - (0x10000, 'V'), - (0x1000C, 'X'), - (0x1000D, 'V'), - (0x10027, 'X'), - (0x10028, 'V'), - (0x1003B, 'X'), - (0x1003C, 'V'), - (0x1003E, 'X'), - (0x1003F, 'V'), - (0x1004E, 'X'), - (0x10050, 'V'), - (0x1005E, 'X'), - (0x10080, 'V'), - (0x100FB, 'X'), - (0x10100, 'V'), - (0x10103, 'X'), - (0x10107, 'V'), - (0x10134, 'X'), - (0x10137, 'V'), - (0x1018F, 'X'), - (0x10190, 'V'), - (0x1019C, 'X'), - (0x101A0, 'V'), - (0x101A1, 'X'), - (0x101D0, 'V'), - (0x101FE, 'X'), - (0x10280, 'V'), - (0x1029D, 'X'), - (0x102A0, 'V'), - (0x102D1, 'X'), - (0x102E0, 'V'), - (0x102FC, 'X'), - (0x10300, 'V'), - (0x10324, 'X'), - (0x1032D, 'V'), - (0x1034B, 'X'), - (0x10350, 'V'), - (0x1037B, 'X'), - (0x10380, 'V'), - (0x1039E, 'X'), - (0x1039F, 'V'), - (0x103C4, 'X'), - (0x103C8, 'V'), - (0x103D6, 'X'), - (0x10400, 'M', u'𐐨'), - (0x10401, 'M', u'𐐩'), - ] - -def _seg_53(): - return [ - (0x10402, 'M', u'𐐪'), - (0x10403, 'M', u'𐐫'), - (0x10404, 'M', u'𐐬'), - (0x10405, 'M', u'𐐭'), - (0x10406, 'M', u'𐐮'), - (0x10407, 'M', u'𐐯'), - (0x10408, 'M', u'𐐰'), - (0x10409, 'M', u'𐐱'), - (0x1040A, 'M', u'𐐲'), - (0x1040B, 'M', u'𐐳'), - (0x1040C, 'M', u'𐐴'), - (0x1040D, 'M', u'𐐵'), - (0x1040E, 'M', u'𐐶'), - (0x1040F, 'M', u'𐐷'), - (0x10410, 'M', u'𐐸'), - 
(0x10411, 'M', u'𐐹'), - (0x10412, 'M', u'𐐺'), - (0x10413, 'M', u'𐐻'), - (0x10414, 'M', u'𐐼'), - (0x10415, 'M', u'𐐽'), - (0x10416, 'M', u'𐐾'), - (0x10417, 'M', u'𐐿'), - (0x10418, 'M', u'𐑀'), - (0x10419, 'M', u'𐑁'), - (0x1041A, 'M', u'𐑂'), - (0x1041B, 'M', u'𐑃'), - (0x1041C, 'M', u'𐑄'), - (0x1041D, 'M', u'𐑅'), - (0x1041E, 'M', u'𐑆'), - (0x1041F, 'M', u'𐑇'), - (0x10420, 'M', u'𐑈'), - (0x10421, 'M', u'𐑉'), - (0x10422, 'M', u'𐑊'), - (0x10423, 'M', u'𐑋'), - (0x10424, 'M', u'𐑌'), - (0x10425, 'M', u'𐑍'), - (0x10426, 'M', u'𐑎'), - (0x10427, 'M', u'𐑏'), - (0x10428, 'V'), - (0x1049E, 'X'), - (0x104A0, 'V'), - (0x104AA, 'X'), - (0x104B0, 'M', u'𐓘'), - (0x104B1, 'M', u'𐓙'), - (0x104B2, 'M', u'𐓚'), - (0x104B3, 'M', u'𐓛'), - (0x104B4, 'M', u'𐓜'), - (0x104B5, 'M', u'𐓝'), - (0x104B6, 'M', u'𐓞'), - (0x104B7, 'M', u'𐓟'), - (0x104B8, 'M', u'𐓠'), - (0x104B9, 'M', u'𐓡'), - (0x104BA, 'M', u'𐓢'), - (0x104BB, 'M', u'𐓣'), - (0x104BC, 'M', u'𐓤'), - (0x104BD, 'M', u'𐓥'), - (0x104BE, 'M', u'𐓦'), - (0x104BF, 'M', u'𐓧'), - (0x104C0, 'M', u'𐓨'), - (0x104C1, 'M', u'𐓩'), - (0x104C2, 'M', u'𐓪'), - (0x104C3, 'M', u'𐓫'), - (0x104C4, 'M', u'𐓬'), - (0x104C5, 'M', u'𐓭'), - (0x104C6, 'M', u'𐓮'), - (0x104C7, 'M', u'𐓯'), - (0x104C8, 'M', u'𐓰'), - (0x104C9, 'M', u'𐓱'), - (0x104CA, 'M', u'𐓲'), - (0x104CB, 'M', u'𐓳'), - (0x104CC, 'M', u'𐓴'), - (0x104CD, 'M', u'𐓵'), - (0x104CE, 'M', u'𐓶'), - (0x104CF, 'M', u'𐓷'), - (0x104D0, 'M', u'𐓸'), - (0x104D1, 'M', u'𐓹'), - (0x104D2, 'M', u'𐓺'), - (0x104D3, 'M', u'𐓻'), - (0x104D4, 'X'), - (0x104D8, 'V'), - (0x104FC, 'X'), - (0x10500, 'V'), - (0x10528, 'X'), - (0x10530, 'V'), - (0x10564, 'X'), - (0x1056F, 'V'), - (0x10570, 'X'), - (0x10600, 'V'), - (0x10737, 'X'), - (0x10740, 'V'), - (0x10756, 'X'), - (0x10760, 'V'), - (0x10768, 'X'), - (0x10800, 'V'), - (0x10806, 'X'), - (0x10808, 'V'), - (0x10809, 'X'), - (0x1080A, 'V'), - (0x10836, 'X'), - (0x10837, 'V'), - ] - -def _seg_54(): - return [ - (0x10839, 'X'), - (0x1083C, 'V'), - (0x1083D, 'X'), - (0x1083F, 'V'), - (0x10856, 'X'), - (0x10857, 'V'), - (0x1089F, 'X'), - (0x108A7, 'V'), - (0x108B0, 'X'), - (0x108E0, 'V'), - (0x108F3, 'X'), - (0x108F4, 'V'), - (0x108F6, 'X'), - (0x108FB, 'V'), - (0x1091C, 'X'), - (0x1091F, 'V'), - (0x1093A, 'X'), - (0x1093F, 'V'), - (0x10940, 'X'), - (0x10980, 'V'), - (0x109B8, 'X'), - (0x109BC, 'V'), - (0x109D0, 'X'), - (0x109D2, 'V'), - (0x10A04, 'X'), - (0x10A05, 'V'), - (0x10A07, 'X'), - (0x10A0C, 'V'), - (0x10A14, 'X'), - (0x10A15, 'V'), - (0x10A18, 'X'), - (0x10A19, 'V'), - (0x10A36, 'X'), - (0x10A38, 'V'), - (0x10A3B, 'X'), - (0x10A3F, 'V'), - (0x10A49, 'X'), - (0x10A50, 'V'), - (0x10A59, 'X'), - (0x10A60, 'V'), - (0x10AA0, 'X'), - (0x10AC0, 'V'), - (0x10AE7, 'X'), - (0x10AEB, 'V'), - (0x10AF7, 'X'), - (0x10B00, 'V'), - (0x10B36, 'X'), - (0x10B39, 'V'), - (0x10B56, 'X'), - (0x10B58, 'V'), - (0x10B73, 'X'), - (0x10B78, 'V'), - (0x10B92, 'X'), - (0x10B99, 'V'), - (0x10B9D, 'X'), - (0x10BA9, 'V'), - (0x10BB0, 'X'), - (0x10C00, 'V'), - (0x10C49, 'X'), - (0x10C80, 'M', u'𐳀'), - (0x10C81, 'M', u'𐳁'), - (0x10C82, 'M', u'𐳂'), - (0x10C83, 'M', u'𐳃'), - (0x10C84, 'M', u'𐳄'), - (0x10C85, 'M', u'𐳅'), - (0x10C86, 'M', u'𐳆'), - (0x10C87, 'M', u'𐳇'), - (0x10C88, 'M', u'𐳈'), - (0x10C89, 'M', u'𐳉'), - (0x10C8A, 'M', u'𐳊'), - (0x10C8B, 'M', u'𐳋'), - (0x10C8C, 'M', u'𐳌'), - (0x10C8D, 'M', u'𐳍'), - (0x10C8E, 'M', u'𐳎'), - (0x10C8F, 'M', u'𐳏'), - (0x10C90, 'M', u'𐳐'), - (0x10C91, 'M', u'𐳑'), - (0x10C92, 'M', u'𐳒'), - (0x10C93, 'M', u'𐳓'), - (0x10C94, 'M', u'𐳔'), - (0x10C95, 'M', u'𐳕'), - (0x10C96, 'M', u'𐳖'), - (0x10C97, 'M', u'𐳗'), - 
(0x10C98, 'M', u'𐳘'), - (0x10C99, 'M', u'𐳙'), - (0x10C9A, 'M', u'𐳚'), - (0x10C9B, 'M', u'𐳛'), - (0x10C9C, 'M', u'𐳜'), - (0x10C9D, 'M', u'𐳝'), - (0x10C9E, 'M', u'𐳞'), - (0x10C9F, 'M', u'𐳟'), - (0x10CA0, 'M', u'𐳠'), - (0x10CA1, 'M', u'𐳡'), - (0x10CA2, 'M', u'𐳢'), - (0x10CA3, 'M', u'𐳣'), - (0x10CA4, 'M', u'𐳤'), - (0x10CA5, 'M', u'𐳥'), - (0x10CA6, 'M', u'𐳦'), - (0x10CA7, 'M', u'𐳧'), - (0x10CA8, 'M', u'𐳨'), - ] - -def _seg_55(): - return [ - (0x10CA9, 'M', u'𐳩'), - (0x10CAA, 'M', u'𐳪'), - (0x10CAB, 'M', u'𐳫'), - (0x10CAC, 'M', u'𐳬'), - (0x10CAD, 'M', u'𐳭'), - (0x10CAE, 'M', u'𐳮'), - (0x10CAF, 'M', u'𐳯'), - (0x10CB0, 'M', u'𐳰'), - (0x10CB1, 'M', u'𐳱'), - (0x10CB2, 'M', u'𐳲'), - (0x10CB3, 'X'), - (0x10CC0, 'V'), - (0x10CF3, 'X'), - (0x10CFA, 'V'), - (0x10D28, 'X'), - (0x10D30, 'V'), - (0x10D3A, 'X'), - (0x10E60, 'V'), - (0x10E7F, 'X'), - (0x10F00, 'V'), - (0x10F28, 'X'), - (0x10F30, 'V'), - (0x10F5A, 'X'), - (0x11000, 'V'), - (0x1104E, 'X'), - (0x11052, 'V'), - (0x11070, 'X'), - (0x1107F, 'V'), - (0x110BD, 'X'), - (0x110BE, 'V'), - (0x110C2, 'X'), - (0x110D0, 'V'), - (0x110E9, 'X'), - (0x110F0, 'V'), - (0x110FA, 'X'), - (0x11100, 'V'), - (0x11135, 'X'), - (0x11136, 'V'), - (0x11147, 'X'), - (0x11150, 'V'), - (0x11177, 'X'), - (0x11180, 'V'), - (0x111CE, 'X'), - (0x111D0, 'V'), - (0x111E0, 'X'), - (0x111E1, 'V'), - (0x111F5, 'X'), - (0x11200, 'V'), - (0x11212, 'X'), - (0x11213, 'V'), - (0x1123F, 'X'), - (0x11280, 'V'), - (0x11287, 'X'), - (0x11288, 'V'), - (0x11289, 'X'), - (0x1128A, 'V'), - (0x1128E, 'X'), - (0x1128F, 'V'), - (0x1129E, 'X'), - (0x1129F, 'V'), - (0x112AA, 'X'), - (0x112B0, 'V'), - (0x112EB, 'X'), - (0x112F0, 'V'), - (0x112FA, 'X'), - (0x11300, 'V'), - (0x11304, 'X'), - (0x11305, 'V'), - (0x1130D, 'X'), - (0x1130F, 'V'), - (0x11311, 'X'), - (0x11313, 'V'), - (0x11329, 'X'), - (0x1132A, 'V'), - (0x11331, 'X'), - (0x11332, 'V'), - (0x11334, 'X'), - (0x11335, 'V'), - (0x1133A, 'X'), - (0x1133B, 'V'), - (0x11345, 'X'), - (0x11347, 'V'), - (0x11349, 'X'), - (0x1134B, 'V'), - (0x1134E, 'X'), - (0x11350, 'V'), - (0x11351, 'X'), - (0x11357, 'V'), - (0x11358, 'X'), - (0x1135D, 'V'), - (0x11364, 'X'), - (0x11366, 'V'), - (0x1136D, 'X'), - (0x11370, 'V'), - (0x11375, 'X'), - (0x11400, 'V'), - (0x1145A, 'X'), - (0x1145B, 'V'), - (0x1145C, 'X'), - (0x1145D, 'V'), - ] - -def _seg_56(): - return [ - (0x1145F, 'X'), - (0x11480, 'V'), - (0x114C8, 'X'), - (0x114D0, 'V'), - (0x114DA, 'X'), - (0x11580, 'V'), - (0x115B6, 'X'), - (0x115B8, 'V'), - (0x115DE, 'X'), - (0x11600, 'V'), - (0x11645, 'X'), - (0x11650, 'V'), - (0x1165A, 'X'), - (0x11660, 'V'), - (0x1166D, 'X'), - (0x11680, 'V'), - (0x116B8, 'X'), - (0x116C0, 'V'), - (0x116CA, 'X'), - (0x11700, 'V'), - (0x1171B, 'X'), - (0x1171D, 'V'), - (0x1172C, 'X'), - (0x11730, 'V'), - (0x11740, 'X'), - (0x11800, 'V'), - (0x1183C, 'X'), - (0x118A0, 'M', u'𑣀'), - (0x118A1, 'M', u'𑣁'), - (0x118A2, 'M', u'𑣂'), - (0x118A3, 'M', u'𑣃'), - (0x118A4, 'M', u'𑣄'), - (0x118A5, 'M', u'𑣅'), - (0x118A6, 'M', u'𑣆'), - (0x118A7, 'M', u'𑣇'), - (0x118A8, 'M', u'𑣈'), - (0x118A9, 'M', u'𑣉'), - (0x118AA, 'M', u'𑣊'), - (0x118AB, 'M', u'𑣋'), - (0x118AC, 'M', u'𑣌'), - (0x118AD, 'M', u'𑣍'), - (0x118AE, 'M', u'𑣎'), - (0x118AF, 'M', u'𑣏'), - (0x118B0, 'M', u'𑣐'), - (0x118B1, 'M', u'𑣑'), - (0x118B2, 'M', u'𑣒'), - (0x118B3, 'M', u'𑣓'), - (0x118B4, 'M', u'𑣔'), - (0x118B5, 'M', u'𑣕'), - (0x118B6, 'M', u'𑣖'), - (0x118B7, 'M', u'𑣗'), - (0x118B8, 'M', u'𑣘'), - (0x118B9, 'M', u'𑣙'), - (0x118BA, 'M', u'𑣚'), - (0x118BB, 'M', u'𑣛'), - (0x118BC, 'M', u'𑣜'), - (0x118BD, 'M', u'𑣝'), - (0x118BE, 'M', 
u'𑣞'), - (0x118BF, 'M', u'𑣟'), - (0x118C0, 'V'), - (0x118F3, 'X'), - (0x118FF, 'V'), - (0x11900, 'X'), - (0x11A00, 'V'), - (0x11A48, 'X'), - (0x11A50, 'V'), - (0x11A84, 'X'), - (0x11A86, 'V'), - (0x11AA3, 'X'), - (0x11AC0, 'V'), - (0x11AF9, 'X'), - (0x11C00, 'V'), - (0x11C09, 'X'), - (0x11C0A, 'V'), - (0x11C37, 'X'), - (0x11C38, 'V'), - (0x11C46, 'X'), - (0x11C50, 'V'), - (0x11C6D, 'X'), - (0x11C70, 'V'), - (0x11C90, 'X'), - (0x11C92, 'V'), - (0x11CA8, 'X'), - (0x11CA9, 'V'), - (0x11CB7, 'X'), - (0x11D00, 'V'), - (0x11D07, 'X'), - (0x11D08, 'V'), - (0x11D0A, 'X'), - (0x11D0B, 'V'), - (0x11D37, 'X'), - (0x11D3A, 'V'), - (0x11D3B, 'X'), - (0x11D3C, 'V'), - (0x11D3E, 'X'), - (0x11D3F, 'V'), - (0x11D48, 'X'), - (0x11D50, 'V'), - (0x11D5A, 'X'), - (0x11D60, 'V'), - ] - -def _seg_57(): - return [ - (0x11D66, 'X'), - (0x11D67, 'V'), - (0x11D69, 'X'), - (0x11D6A, 'V'), - (0x11D8F, 'X'), - (0x11D90, 'V'), - (0x11D92, 'X'), - (0x11D93, 'V'), - (0x11D99, 'X'), - (0x11DA0, 'V'), - (0x11DAA, 'X'), - (0x11EE0, 'V'), - (0x11EF9, 'X'), - (0x12000, 'V'), - (0x1239A, 'X'), - (0x12400, 'V'), - (0x1246F, 'X'), - (0x12470, 'V'), - (0x12475, 'X'), - (0x12480, 'V'), - (0x12544, 'X'), - (0x13000, 'V'), - (0x1342F, 'X'), - (0x14400, 'V'), - (0x14647, 'X'), - (0x16800, 'V'), - (0x16A39, 'X'), - (0x16A40, 'V'), - (0x16A5F, 'X'), - (0x16A60, 'V'), - (0x16A6A, 'X'), - (0x16A6E, 'V'), - (0x16A70, 'X'), - (0x16AD0, 'V'), - (0x16AEE, 'X'), - (0x16AF0, 'V'), - (0x16AF6, 'X'), - (0x16B00, 'V'), - (0x16B46, 'X'), - (0x16B50, 'V'), - (0x16B5A, 'X'), - (0x16B5B, 'V'), - (0x16B62, 'X'), - (0x16B63, 'V'), - (0x16B78, 'X'), - (0x16B7D, 'V'), - (0x16B90, 'X'), - (0x16E60, 'V'), - (0x16E9B, 'X'), - (0x16F00, 'V'), - (0x16F45, 'X'), - (0x16F50, 'V'), - (0x16F7F, 'X'), - (0x16F8F, 'V'), - (0x16FA0, 'X'), - (0x16FE0, 'V'), - (0x16FE2, 'X'), - (0x17000, 'V'), - (0x187F2, 'X'), - (0x18800, 'V'), - (0x18AF3, 'X'), - (0x1B000, 'V'), - (0x1B11F, 'X'), - (0x1B170, 'V'), - (0x1B2FC, 'X'), - (0x1BC00, 'V'), - (0x1BC6B, 'X'), - (0x1BC70, 'V'), - (0x1BC7D, 'X'), - (0x1BC80, 'V'), - (0x1BC89, 'X'), - (0x1BC90, 'V'), - (0x1BC9A, 'X'), - (0x1BC9C, 'V'), - (0x1BCA0, 'I'), - (0x1BCA4, 'X'), - (0x1D000, 'V'), - (0x1D0F6, 'X'), - (0x1D100, 'V'), - (0x1D127, 'X'), - (0x1D129, 'V'), - (0x1D15E, 'M', u'𝅗𝅥'), - (0x1D15F, 'M', u'𝅘𝅥'), - (0x1D160, 'M', u'𝅘𝅥𝅮'), - (0x1D161, 'M', u'𝅘𝅥𝅯'), - (0x1D162, 'M', u'𝅘𝅥𝅰'), - (0x1D163, 'M', u'𝅘𝅥𝅱'), - (0x1D164, 'M', u'𝅘𝅥𝅲'), - (0x1D165, 'V'), - (0x1D173, 'X'), - (0x1D17B, 'V'), - (0x1D1BB, 'M', u'𝆹𝅥'), - (0x1D1BC, 'M', u'𝆺𝅥'), - (0x1D1BD, 'M', u'𝆹𝅥𝅮'), - (0x1D1BE, 'M', u'𝆺𝅥𝅮'), - (0x1D1BF, 'M', u'𝆹𝅥𝅯'), - (0x1D1C0, 'M', u'𝆺𝅥𝅯'), - (0x1D1C1, 'V'), - (0x1D1E9, 'X'), - (0x1D200, 'V'), - ] - -def _seg_58(): - return [ - (0x1D246, 'X'), - (0x1D2E0, 'V'), - (0x1D2F4, 'X'), - (0x1D300, 'V'), - (0x1D357, 'X'), - (0x1D360, 'V'), - (0x1D379, 'X'), - (0x1D400, 'M', u'a'), - (0x1D401, 'M', u'b'), - (0x1D402, 'M', u'c'), - (0x1D403, 'M', u'd'), - (0x1D404, 'M', u'e'), - (0x1D405, 'M', u'f'), - (0x1D406, 'M', u'g'), - (0x1D407, 'M', u'h'), - (0x1D408, 'M', u'i'), - (0x1D409, 'M', u'j'), - (0x1D40A, 'M', u'k'), - (0x1D40B, 'M', u'l'), - (0x1D40C, 'M', u'm'), - (0x1D40D, 'M', u'n'), - (0x1D40E, 'M', u'o'), - (0x1D40F, 'M', u'p'), - (0x1D410, 'M', u'q'), - (0x1D411, 'M', u'r'), - (0x1D412, 'M', u's'), - (0x1D413, 'M', u't'), - (0x1D414, 'M', u'u'), - (0x1D415, 'M', u'v'), - (0x1D416, 'M', u'w'), - (0x1D417, 'M', u'x'), - (0x1D418, 'M', u'y'), - (0x1D419, 'M', u'z'), - (0x1D41A, 'M', u'a'), - (0x1D41B, 'M', u'b'), - (0x1D41C, 'M', 
u'c'), - (0x1D41D, 'M', u'd'), - (0x1D41E, 'M', u'e'), - (0x1D41F, 'M', u'f'), - (0x1D420, 'M', u'g'), - (0x1D421, 'M', u'h'), - (0x1D422, 'M', u'i'), - (0x1D423, 'M', u'j'), - (0x1D424, 'M', u'k'), - (0x1D425, 'M', u'l'), - (0x1D426, 'M', u'm'), - (0x1D427, 'M', u'n'), - (0x1D428, 'M', u'o'), - (0x1D429, 'M', u'p'), - (0x1D42A, 'M', u'q'), - (0x1D42B, 'M', u'r'), - (0x1D42C, 'M', u's'), - (0x1D42D, 'M', u't'), - (0x1D42E, 'M', u'u'), - (0x1D42F, 'M', u'v'), - (0x1D430, 'M', u'w'), - (0x1D431, 'M', u'x'), - (0x1D432, 'M', u'y'), - (0x1D433, 'M', u'z'), - (0x1D434, 'M', u'a'), - (0x1D435, 'M', u'b'), - (0x1D436, 'M', u'c'), - (0x1D437, 'M', u'd'), - (0x1D438, 'M', u'e'), - (0x1D439, 'M', u'f'), - (0x1D43A, 'M', u'g'), - (0x1D43B, 'M', u'h'), - (0x1D43C, 'M', u'i'), - (0x1D43D, 'M', u'j'), - (0x1D43E, 'M', u'k'), - (0x1D43F, 'M', u'l'), - (0x1D440, 'M', u'm'), - (0x1D441, 'M', u'n'), - (0x1D442, 'M', u'o'), - (0x1D443, 'M', u'p'), - (0x1D444, 'M', u'q'), - (0x1D445, 'M', u'r'), - (0x1D446, 'M', u's'), - (0x1D447, 'M', u't'), - (0x1D448, 'M', u'u'), - (0x1D449, 'M', u'v'), - (0x1D44A, 'M', u'w'), - (0x1D44B, 'M', u'x'), - (0x1D44C, 'M', u'y'), - (0x1D44D, 'M', u'z'), - (0x1D44E, 'M', u'a'), - (0x1D44F, 'M', u'b'), - (0x1D450, 'M', u'c'), - (0x1D451, 'M', u'd'), - (0x1D452, 'M', u'e'), - (0x1D453, 'M', u'f'), - (0x1D454, 'M', u'g'), - (0x1D455, 'X'), - (0x1D456, 'M', u'i'), - (0x1D457, 'M', u'j'), - (0x1D458, 'M', u'k'), - (0x1D459, 'M', u'l'), - (0x1D45A, 'M', u'm'), - (0x1D45B, 'M', u'n'), - (0x1D45C, 'M', u'o'), - ] - -def _seg_59(): - return [ - (0x1D45D, 'M', u'p'), - (0x1D45E, 'M', u'q'), - (0x1D45F, 'M', u'r'), - (0x1D460, 'M', u's'), - (0x1D461, 'M', u't'), - (0x1D462, 'M', u'u'), - (0x1D463, 'M', u'v'), - (0x1D464, 'M', u'w'), - (0x1D465, 'M', u'x'), - (0x1D466, 'M', u'y'), - (0x1D467, 'M', u'z'), - (0x1D468, 'M', u'a'), - (0x1D469, 'M', u'b'), - (0x1D46A, 'M', u'c'), - (0x1D46B, 'M', u'd'), - (0x1D46C, 'M', u'e'), - (0x1D46D, 'M', u'f'), - (0x1D46E, 'M', u'g'), - (0x1D46F, 'M', u'h'), - (0x1D470, 'M', u'i'), - (0x1D471, 'M', u'j'), - (0x1D472, 'M', u'k'), - (0x1D473, 'M', u'l'), - (0x1D474, 'M', u'm'), - (0x1D475, 'M', u'n'), - (0x1D476, 'M', u'o'), - (0x1D477, 'M', u'p'), - (0x1D478, 'M', u'q'), - (0x1D479, 'M', u'r'), - (0x1D47A, 'M', u's'), - (0x1D47B, 'M', u't'), - (0x1D47C, 'M', u'u'), - (0x1D47D, 'M', u'v'), - (0x1D47E, 'M', u'w'), - (0x1D47F, 'M', u'x'), - (0x1D480, 'M', u'y'), - (0x1D481, 'M', u'z'), - (0x1D482, 'M', u'a'), - (0x1D483, 'M', u'b'), - (0x1D484, 'M', u'c'), - (0x1D485, 'M', u'd'), - (0x1D486, 'M', u'e'), - (0x1D487, 'M', u'f'), - (0x1D488, 'M', u'g'), - (0x1D489, 'M', u'h'), - (0x1D48A, 'M', u'i'), - (0x1D48B, 'M', u'j'), - (0x1D48C, 'M', u'k'), - (0x1D48D, 'M', u'l'), - (0x1D48E, 'M', u'm'), - (0x1D48F, 'M', u'n'), - (0x1D490, 'M', u'o'), - (0x1D491, 'M', u'p'), - (0x1D492, 'M', u'q'), - (0x1D493, 'M', u'r'), - (0x1D494, 'M', u's'), - (0x1D495, 'M', u't'), - (0x1D496, 'M', u'u'), - (0x1D497, 'M', u'v'), - (0x1D498, 'M', u'w'), - (0x1D499, 'M', u'x'), - (0x1D49A, 'M', u'y'), - (0x1D49B, 'M', u'z'), - (0x1D49C, 'M', u'a'), - (0x1D49D, 'X'), - (0x1D49E, 'M', u'c'), - (0x1D49F, 'M', u'd'), - (0x1D4A0, 'X'), - (0x1D4A2, 'M', u'g'), - (0x1D4A3, 'X'), - (0x1D4A5, 'M', u'j'), - (0x1D4A6, 'M', u'k'), - (0x1D4A7, 'X'), - (0x1D4A9, 'M', u'n'), - (0x1D4AA, 'M', u'o'), - (0x1D4AB, 'M', u'p'), - (0x1D4AC, 'M', u'q'), - (0x1D4AD, 'X'), - (0x1D4AE, 'M', u's'), - (0x1D4AF, 'M', u't'), - (0x1D4B0, 'M', u'u'), - (0x1D4B1, 'M', u'v'), - (0x1D4B2, 'M', u'w'), - (0x1D4B3, 'M', 
u'x'), - (0x1D4B4, 'M', u'y'), - (0x1D4B5, 'M', u'z'), - (0x1D4B6, 'M', u'a'), - (0x1D4B7, 'M', u'b'), - (0x1D4B8, 'M', u'c'), - (0x1D4B9, 'M', u'd'), - (0x1D4BA, 'X'), - (0x1D4BB, 'M', u'f'), - (0x1D4BC, 'X'), - (0x1D4BD, 'M', u'h'), - (0x1D4BE, 'M', u'i'), - (0x1D4BF, 'M', u'j'), - (0x1D4C0, 'M', u'k'), - (0x1D4C1, 'M', u'l'), - (0x1D4C2, 'M', u'm'), - (0x1D4C3, 'M', u'n'), - ] - -def _seg_60(): - return [ - (0x1D4C4, 'X'), - (0x1D4C5, 'M', u'p'), - (0x1D4C6, 'M', u'q'), - (0x1D4C7, 'M', u'r'), - (0x1D4C8, 'M', u's'), - (0x1D4C9, 'M', u't'), - (0x1D4CA, 'M', u'u'), - (0x1D4CB, 'M', u'v'), - (0x1D4CC, 'M', u'w'), - (0x1D4CD, 'M', u'x'), - (0x1D4CE, 'M', u'y'), - (0x1D4CF, 'M', u'z'), - (0x1D4D0, 'M', u'a'), - (0x1D4D1, 'M', u'b'), - (0x1D4D2, 'M', u'c'), - (0x1D4D3, 'M', u'd'), - (0x1D4D4, 'M', u'e'), - (0x1D4D5, 'M', u'f'), - (0x1D4D6, 'M', u'g'), - (0x1D4D7, 'M', u'h'), - (0x1D4D8, 'M', u'i'), - (0x1D4D9, 'M', u'j'), - (0x1D4DA, 'M', u'k'), - (0x1D4DB, 'M', u'l'), - (0x1D4DC, 'M', u'm'), - (0x1D4DD, 'M', u'n'), - (0x1D4DE, 'M', u'o'), - (0x1D4DF, 'M', u'p'), - (0x1D4E0, 'M', u'q'), - (0x1D4E1, 'M', u'r'), - (0x1D4E2, 'M', u's'), - (0x1D4E3, 'M', u't'), - (0x1D4E4, 'M', u'u'), - (0x1D4E5, 'M', u'v'), - (0x1D4E6, 'M', u'w'), - (0x1D4E7, 'M', u'x'), - (0x1D4E8, 'M', u'y'), - (0x1D4E9, 'M', u'z'), - (0x1D4EA, 'M', u'a'), - (0x1D4EB, 'M', u'b'), - (0x1D4EC, 'M', u'c'), - (0x1D4ED, 'M', u'd'), - (0x1D4EE, 'M', u'e'), - (0x1D4EF, 'M', u'f'), - (0x1D4F0, 'M', u'g'), - (0x1D4F1, 'M', u'h'), - (0x1D4F2, 'M', u'i'), - (0x1D4F3, 'M', u'j'), - (0x1D4F4, 'M', u'k'), - (0x1D4F5, 'M', u'l'), - (0x1D4F6, 'M', u'm'), - (0x1D4F7, 'M', u'n'), - (0x1D4F8, 'M', u'o'), - (0x1D4F9, 'M', u'p'), - (0x1D4FA, 'M', u'q'), - (0x1D4FB, 'M', u'r'), - (0x1D4FC, 'M', u's'), - (0x1D4FD, 'M', u't'), - (0x1D4FE, 'M', u'u'), - (0x1D4FF, 'M', u'v'), - (0x1D500, 'M', u'w'), - (0x1D501, 'M', u'x'), - (0x1D502, 'M', u'y'), - (0x1D503, 'M', u'z'), - (0x1D504, 'M', u'a'), - (0x1D505, 'M', u'b'), - (0x1D506, 'X'), - (0x1D507, 'M', u'd'), - (0x1D508, 'M', u'e'), - (0x1D509, 'M', u'f'), - (0x1D50A, 'M', u'g'), - (0x1D50B, 'X'), - (0x1D50D, 'M', u'j'), - (0x1D50E, 'M', u'k'), - (0x1D50F, 'M', u'l'), - (0x1D510, 'M', u'm'), - (0x1D511, 'M', u'n'), - (0x1D512, 'M', u'o'), - (0x1D513, 'M', u'p'), - (0x1D514, 'M', u'q'), - (0x1D515, 'X'), - (0x1D516, 'M', u's'), - (0x1D517, 'M', u't'), - (0x1D518, 'M', u'u'), - (0x1D519, 'M', u'v'), - (0x1D51A, 'M', u'w'), - (0x1D51B, 'M', u'x'), - (0x1D51C, 'M', u'y'), - (0x1D51D, 'X'), - (0x1D51E, 'M', u'a'), - (0x1D51F, 'M', u'b'), - (0x1D520, 'M', u'c'), - (0x1D521, 'M', u'd'), - (0x1D522, 'M', u'e'), - (0x1D523, 'M', u'f'), - (0x1D524, 'M', u'g'), - (0x1D525, 'M', u'h'), - (0x1D526, 'M', u'i'), - (0x1D527, 'M', u'j'), - (0x1D528, 'M', u'k'), - ] - -def _seg_61(): - return [ - (0x1D529, 'M', u'l'), - (0x1D52A, 'M', u'm'), - (0x1D52B, 'M', u'n'), - (0x1D52C, 'M', u'o'), - (0x1D52D, 'M', u'p'), - (0x1D52E, 'M', u'q'), - (0x1D52F, 'M', u'r'), - (0x1D530, 'M', u's'), - (0x1D531, 'M', u't'), - (0x1D532, 'M', u'u'), - (0x1D533, 'M', u'v'), - (0x1D534, 'M', u'w'), - (0x1D535, 'M', u'x'), - (0x1D536, 'M', u'y'), - (0x1D537, 'M', u'z'), - (0x1D538, 'M', u'a'), - (0x1D539, 'M', u'b'), - (0x1D53A, 'X'), - (0x1D53B, 'M', u'd'), - (0x1D53C, 'M', u'e'), - (0x1D53D, 'M', u'f'), - (0x1D53E, 'M', u'g'), - (0x1D53F, 'X'), - (0x1D540, 'M', u'i'), - (0x1D541, 'M', u'j'), - (0x1D542, 'M', u'k'), - (0x1D543, 'M', u'l'), - (0x1D544, 'M', u'm'), - (0x1D545, 'X'), - (0x1D546, 'M', u'o'), - (0x1D547, 'X'), - (0x1D54A, 'M', 
u's'), - (0x1D54B, 'M', u't'), - (0x1D54C, 'M', u'u'), - (0x1D54D, 'M', u'v'), - (0x1D54E, 'M', u'w'), - (0x1D54F, 'M', u'x'), - (0x1D550, 'M', u'y'), - (0x1D551, 'X'), - (0x1D552, 'M', u'a'), - (0x1D553, 'M', u'b'), - (0x1D554, 'M', u'c'), - (0x1D555, 'M', u'd'), - (0x1D556, 'M', u'e'), - (0x1D557, 'M', u'f'), - (0x1D558, 'M', u'g'), - (0x1D559, 'M', u'h'), - (0x1D55A, 'M', u'i'), - (0x1D55B, 'M', u'j'), - (0x1D55C, 'M', u'k'), - (0x1D55D, 'M', u'l'), - (0x1D55E, 'M', u'm'), - (0x1D55F, 'M', u'n'), - (0x1D560, 'M', u'o'), - (0x1D561, 'M', u'p'), - (0x1D562, 'M', u'q'), - (0x1D563, 'M', u'r'), - (0x1D564, 'M', u's'), - (0x1D565, 'M', u't'), - (0x1D566, 'M', u'u'), - (0x1D567, 'M', u'v'), - (0x1D568, 'M', u'w'), - (0x1D569, 'M', u'x'), - (0x1D56A, 'M', u'y'), - (0x1D56B, 'M', u'z'), - (0x1D56C, 'M', u'a'), - (0x1D56D, 'M', u'b'), - (0x1D56E, 'M', u'c'), - (0x1D56F, 'M', u'd'), - (0x1D570, 'M', u'e'), - (0x1D571, 'M', u'f'), - (0x1D572, 'M', u'g'), - (0x1D573, 'M', u'h'), - (0x1D574, 'M', u'i'), - (0x1D575, 'M', u'j'), - (0x1D576, 'M', u'k'), - (0x1D577, 'M', u'l'), - (0x1D578, 'M', u'm'), - (0x1D579, 'M', u'n'), - (0x1D57A, 'M', u'o'), - (0x1D57B, 'M', u'p'), - (0x1D57C, 'M', u'q'), - (0x1D57D, 'M', u'r'), - (0x1D57E, 'M', u's'), - (0x1D57F, 'M', u't'), - (0x1D580, 'M', u'u'), - (0x1D581, 'M', u'v'), - (0x1D582, 'M', u'w'), - (0x1D583, 'M', u'x'), - (0x1D584, 'M', u'y'), - (0x1D585, 'M', u'z'), - (0x1D586, 'M', u'a'), - (0x1D587, 'M', u'b'), - (0x1D588, 'M', u'c'), - (0x1D589, 'M', u'd'), - (0x1D58A, 'M', u'e'), - (0x1D58B, 'M', u'f'), - (0x1D58C, 'M', u'g'), - (0x1D58D, 'M', u'h'), - (0x1D58E, 'M', u'i'), - ] - -def _seg_62(): - return [ - (0x1D58F, 'M', u'j'), - (0x1D590, 'M', u'k'), - (0x1D591, 'M', u'l'), - (0x1D592, 'M', u'm'), - (0x1D593, 'M', u'n'), - (0x1D594, 'M', u'o'), - (0x1D595, 'M', u'p'), - (0x1D596, 'M', u'q'), - (0x1D597, 'M', u'r'), - (0x1D598, 'M', u's'), - (0x1D599, 'M', u't'), - (0x1D59A, 'M', u'u'), - (0x1D59B, 'M', u'v'), - (0x1D59C, 'M', u'w'), - (0x1D59D, 'M', u'x'), - (0x1D59E, 'M', u'y'), - (0x1D59F, 'M', u'z'), - (0x1D5A0, 'M', u'a'), - (0x1D5A1, 'M', u'b'), - (0x1D5A2, 'M', u'c'), - (0x1D5A3, 'M', u'd'), - (0x1D5A4, 'M', u'e'), - (0x1D5A5, 'M', u'f'), - (0x1D5A6, 'M', u'g'), - (0x1D5A7, 'M', u'h'), - (0x1D5A8, 'M', u'i'), - (0x1D5A9, 'M', u'j'), - (0x1D5AA, 'M', u'k'), - (0x1D5AB, 'M', u'l'), - (0x1D5AC, 'M', u'm'), - (0x1D5AD, 'M', u'n'), - (0x1D5AE, 'M', u'o'), - (0x1D5AF, 'M', u'p'), - (0x1D5B0, 'M', u'q'), - (0x1D5B1, 'M', u'r'), - (0x1D5B2, 'M', u's'), - (0x1D5B3, 'M', u't'), - (0x1D5B4, 'M', u'u'), - (0x1D5B5, 'M', u'v'), - (0x1D5B6, 'M', u'w'), - (0x1D5B7, 'M', u'x'), - (0x1D5B8, 'M', u'y'), - (0x1D5B9, 'M', u'z'), - (0x1D5BA, 'M', u'a'), - (0x1D5BB, 'M', u'b'), - (0x1D5BC, 'M', u'c'), - (0x1D5BD, 'M', u'd'), - (0x1D5BE, 'M', u'e'), - (0x1D5BF, 'M', u'f'), - (0x1D5C0, 'M', u'g'), - (0x1D5C1, 'M', u'h'), - (0x1D5C2, 'M', u'i'), - (0x1D5C3, 'M', u'j'), - (0x1D5C4, 'M', u'k'), - (0x1D5C5, 'M', u'l'), - (0x1D5C6, 'M', u'm'), - (0x1D5C7, 'M', u'n'), - (0x1D5C8, 'M', u'o'), - (0x1D5C9, 'M', u'p'), - (0x1D5CA, 'M', u'q'), - (0x1D5CB, 'M', u'r'), - (0x1D5CC, 'M', u's'), - (0x1D5CD, 'M', u't'), - (0x1D5CE, 'M', u'u'), - (0x1D5CF, 'M', u'v'), - (0x1D5D0, 'M', u'w'), - (0x1D5D1, 'M', u'x'), - (0x1D5D2, 'M', u'y'), - (0x1D5D3, 'M', u'z'), - (0x1D5D4, 'M', u'a'), - (0x1D5D5, 'M', u'b'), - (0x1D5D6, 'M', u'c'), - (0x1D5D7, 'M', u'd'), - (0x1D5D8, 'M', u'e'), - (0x1D5D9, 'M', u'f'), - (0x1D5DA, 'M', u'g'), - (0x1D5DB, 'M', u'h'), - (0x1D5DC, 'M', u'i'), - (0x1D5DD, 'M', 
u'j'), - (0x1D5DE, 'M', u'k'), - (0x1D5DF, 'M', u'l'), - (0x1D5E0, 'M', u'm'), - (0x1D5E1, 'M', u'n'), - (0x1D5E2, 'M', u'o'), - (0x1D5E3, 'M', u'p'), - (0x1D5E4, 'M', u'q'), - (0x1D5E5, 'M', u'r'), - (0x1D5E6, 'M', u's'), - (0x1D5E7, 'M', u't'), - (0x1D5E8, 'M', u'u'), - (0x1D5E9, 'M', u'v'), - (0x1D5EA, 'M', u'w'), - (0x1D5EB, 'M', u'x'), - (0x1D5EC, 'M', u'y'), - (0x1D5ED, 'M', u'z'), - (0x1D5EE, 'M', u'a'), - (0x1D5EF, 'M', u'b'), - (0x1D5F0, 'M', u'c'), - (0x1D5F1, 'M', u'd'), - (0x1D5F2, 'M', u'e'), - ] - -def _seg_63(): - return [ - (0x1D5F3, 'M', u'f'), - (0x1D5F4, 'M', u'g'), - (0x1D5F5, 'M', u'h'), - (0x1D5F6, 'M', u'i'), - (0x1D5F7, 'M', u'j'), - (0x1D5F8, 'M', u'k'), - (0x1D5F9, 'M', u'l'), - (0x1D5FA, 'M', u'm'), - (0x1D5FB, 'M', u'n'), - (0x1D5FC, 'M', u'o'), - (0x1D5FD, 'M', u'p'), - (0x1D5FE, 'M', u'q'), - (0x1D5FF, 'M', u'r'), - (0x1D600, 'M', u's'), - (0x1D601, 'M', u't'), - (0x1D602, 'M', u'u'), - (0x1D603, 'M', u'v'), - (0x1D604, 'M', u'w'), - (0x1D605, 'M', u'x'), - (0x1D606, 'M', u'y'), - (0x1D607, 'M', u'z'), - (0x1D608, 'M', u'a'), - (0x1D609, 'M', u'b'), - (0x1D60A, 'M', u'c'), - (0x1D60B, 'M', u'd'), - (0x1D60C, 'M', u'e'), - (0x1D60D, 'M', u'f'), - (0x1D60E, 'M', u'g'), - (0x1D60F, 'M', u'h'), - (0x1D610, 'M', u'i'), - (0x1D611, 'M', u'j'), - (0x1D612, 'M', u'k'), - (0x1D613, 'M', u'l'), - (0x1D614, 'M', u'm'), - (0x1D615, 'M', u'n'), - (0x1D616, 'M', u'o'), - (0x1D617, 'M', u'p'), - (0x1D618, 'M', u'q'), - (0x1D619, 'M', u'r'), - (0x1D61A, 'M', u's'), - (0x1D61B, 'M', u't'), - (0x1D61C, 'M', u'u'), - (0x1D61D, 'M', u'v'), - (0x1D61E, 'M', u'w'), - (0x1D61F, 'M', u'x'), - (0x1D620, 'M', u'y'), - (0x1D621, 'M', u'z'), - (0x1D622, 'M', u'a'), - (0x1D623, 'M', u'b'), - (0x1D624, 'M', u'c'), - (0x1D625, 'M', u'd'), - (0x1D626, 'M', u'e'), - (0x1D627, 'M', u'f'), - (0x1D628, 'M', u'g'), - (0x1D629, 'M', u'h'), - (0x1D62A, 'M', u'i'), - (0x1D62B, 'M', u'j'), - (0x1D62C, 'M', u'k'), - (0x1D62D, 'M', u'l'), - (0x1D62E, 'M', u'm'), - (0x1D62F, 'M', u'n'), - (0x1D630, 'M', u'o'), - (0x1D631, 'M', u'p'), - (0x1D632, 'M', u'q'), - (0x1D633, 'M', u'r'), - (0x1D634, 'M', u's'), - (0x1D635, 'M', u't'), - (0x1D636, 'M', u'u'), - (0x1D637, 'M', u'v'), - (0x1D638, 'M', u'w'), - (0x1D639, 'M', u'x'), - (0x1D63A, 'M', u'y'), - (0x1D63B, 'M', u'z'), - (0x1D63C, 'M', u'a'), - (0x1D63D, 'M', u'b'), - (0x1D63E, 'M', u'c'), - (0x1D63F, 'M', u'd'), - (0x1D640, 'M', u'e'), - (0x1D641, 'M', u'f'), - (0x1D642, 'M', u'g'), - (0x1D643, 'M', u'h'), - (0x1D644, 'M', u'i'), - (0x1D645, 'M', u'j'), - (0x1D646, 'M', u'k'), - (0x1D647, 'M', u'l'), - (0x1D648, 'M', u'm'), - (0x1D649, 'M', u'n'), - (0x1D64A, 'M', u'o'), - (0x1D64B, 'M', u'p'), - (0x1D64C, 'M', u'q'), - (0x1D64D, 'M', u'r'), - (0x1D64E, 'M', u's'), - (0x1D64F, 'M', u't'), - (0x1D650, 'M', u'u'), - (0x1D651, 'M', u'v'), - (0x1D652, 'M', u'w'), - (0x1D653, 'M', u'x'), - (0x1D654, 'M', u'y'), - (0x1D655, 'M', u'z'), - (0x1D656, 'M', u'a'), - ] - -def _seg_64(): - return [ - (0x1D657, 'M', u'b'), - (0x1D658, 'M', u'c'), - (0x1D659, 'M', u'd'), - (0x1D65A, 'M', u'e'), - (0x1D65B, 'M', u'f'), - (0x1D65C, 'M', u'g'), - (0x1D65D, 'M', u'h'), - (0x1D65E, 'M', u'i'), - (0x1D65F, 'M', u'j'), - (0x1D660, 'M', u'k'), - (0x1D661, 'M', u'l'), - (0x1D662, 'M', u'm'), - (0x1D663, 'M', u'n'), - (0x1D664, 'M', u'o'), - (0x1D665, 'M', u'p'), - (0x1D666, 'M', u'q'), - (0x1D667, 'M', u'r'), - (0x1D668, 'M', u's'), - (0x1D669, 'M', u't'), - (0x1D66A, 'M', u'u'), - (0x1D66B, 'M', u'v'), - (0x1D66C, 'M', u'w'), - (0x1D66D, 'M', u'x'), - (0x1D66E, 'M', u'y'), - 
(0x1D66F, 'M', u'z'), - (0x1D670, 'M', u'a'), - (0x1D671, 'M', u'b'), - (0x1D672, 'M', u'c'), - (0x1D673, 'M', u'd'), - (0x1D674, 'M', u'e'), - (0x1D675, 'M', u'f'), - (0x1D676, 'M', u'g'), - (0x1D677, 'M', u'h'), - (0x1D678, 'M', u'i'), - (0x1D679, 'M', u'j'), - (0x1D67A, 'M', u'k'), - (0x1D67B, 'M', u'l'), - (0x1D67C, 'M', u'm'), - (0x1D67D, 'M', u'n'), - (0x1D67E, 'M', u'o'), - (0x1D67F, 'M', u'p'), - (0x1D680, 'M', u'q'), - (0x1D681, 'M', u'r'), - (0x1D682, 'M', u's'), - (0x1D683, 'M', u't'), - (0x1D684, 'M', u'u'), - (0x1D685, 'M', u'v'), - (0x1D686, 'M', u'w'), - (0x1D687, 'M', u'x'), - (0x1D688, 'M', u'y'), - (0x1D689, 'M', u'z'), - (0x1D68A, 'M', u'a'), - (0x1D68B, 'M', u'b'), - (0x1D68C, 'M', u'c'), - (0x1D68D, 'M', u'd'), - (0x1D68E, 'M', u'e'), - (0x1D68F, 'M', u'f'), - (0x1D690, 'M', u'g'), - (0x1D691, 'M', u'h'), - (0x1D692, 'M', u'i'), - (0x1D693, 'M', u'j'), - (0x1D694, 'M', u'k'), - (0x1D695, 'M', u'l'), - (0x1D696, 'M', u'm'), - (0x1D697, 'M', u'n'), - (0x1D698, 'M', u'o'), - (0x1D699, 'M', u'p'), - (0x1D69A, 'M', u'q'), - (0x1D69B, 'M', u'r'), - (0x1D69C, 'M', u's'), - (0x1D69D, 'M', u't'), - (0x1D69E, 'M', u'u'), - (0x1D69F, 'M', u'v'), - (0x1D6A0, 'M', u'w'), - (0x1D6A1, 'M', u'x'), - (0x1D6A2, 'M', u'y'), - (0x1D6A3, 'M', u'z'), - (0x1D6A4, 'M', u'ı'), - (0x1D6A5, 'M', u'ȷ'), - (0x1D6A6, 'X'), - (0x1D6A8, 'M', u'α'), - (0x1D6A9, 'M', u'β'), - (0x1D6AA, 'M', u'γ'), - (0x1D6AB, 'M', u'δ'), - (0x1D6AC, 'M', u'ε'), - (0x1D6AD, 'M', u'ζ'), - (0x1D6AE, 'M', u'η'), - (0x1D6AF, 'M', u'θ'), - (0x1D6B0, 'M', u'ι'), - (0x1D6B1, 'M', u'κ'), - (0x1D6B2, 'M', u'λ'), - (0x1D6B3, 'M', u'μ'), - (0x1D6B4, 'M', u'ν'), - (0x1D6B5, 'M', u'ξ'), - (0x1D6B6, 'M', u'ο'), - (0x1D6B7, 'M', u'π'), - (0x1D6B8, 'M', u'ρ'), - (0x1D6B9, 'M', u'θ'), - (0x1D6BA, 'M', u'σ'), - (0x1D6BB, 'M', u'τ'), - ] - -def _seg_65(): - return [ - (0x1D6BC, 'M', u'υ'), - (0x1D6BD, 'M', u'φ'), - (0x1D6BE, 'M', u'χ'), - (0x1D6BF, 'M', u'ψ'), - (0x1D6C0, 'M', u'ω'), - (0x1D6C1, 'M', u'∇'), - (0x1D6C2, 'M', u'α'), - (0x1D6C3, 'M', u'β'), - (0x1D6C4, 'M', u'γ'), - (0x1D6C5, 'M', u'δ'), - (0x1D6C6, 'M', u'ε'), - (0x1D6C7, 'M', u'ζ'), - (0x1D6C8, 'M', u'η'), - (0x1D6C9, 'M', u'θ'), - (0x1D6CA, 'M', u'ι'), - (0x1D6CB, 'M', u'κ'), - (0x1D6CC, 'M', u'λ'), - (0x1D6CD, 'M', u'μ'), - (0x1D6CE, 'M', u'ν'), - (0x1D6CF, 'M', u'ξ'), - (0x1D6D0, 'M', u'ο'), - (0x1D6D1, 'M', u'π'), - (0x1D6D2, 'M', u'ρ'), - (0x1D6D3, 'M', u'σ'), - (0x1D6D5, 'M', u'τ'), - (0x1D6D6, 'M', u'υ'), - (0x1D6D7, 'M', u'φ'), - (0x1D6D8, 'M', u'χ'), - (0x1D6D9, 'M', u'ψ'), - (0x1D6DA, 'M', u'ω'), - (0x1D6DB, 'M', u'∂'), - (0x1D6DC, 'M', u'ε'), - (0x1D6DD, 'M', u'θ'), - (0x1D6DE, 'M', u'κ'), - (0x1D6DF, 'M', u'φ'), - (0x1D6E0, 'M', u'ρ'), - (0x1D6E1, 'M', u'π'), - (0x1D6E2, 'M', u'α'), - (0x1D6E3, 'M', u'β'), - (0x1D6E4, 'M', u'γ'), - (0x1D6E5, 'M', u'δ'), - (0x1D6E6, 'M', u'ε'), - (0x1D6E7, 'M', u'ζ'), - (0x1D6E8, 'M', u'η'), - (0x1D6E9, 'M', u'θ'), - (0x1D6EA, 'M', u'ι'), - (0x1D6EB, 'M', u'κ'), - (0x1D6EC, 'M', u'λ'), - (0x1D6ED, 'M', u'μ'), - (0x1D6EE, 'M', u'ν'), - (0x1D6EF, 'M', u'ξ'), - (0x1D6F0, 'M', u'ο'), - (0x1D6F1, 'M', u'π'), - (0x1D6F2, 'M', u'ρ'), - (0x1D6F3, 'M', u'θ'), - (0x1D6F4, 'M', u'σ'), - (0x1D6F5, 'M', u'τ'), - (0x1D6F6, 'M', u'υ'), - (0x1D6F7, 'M', u'φ'), - (0x1D6F8, 'M', u'χ'), - (0x1D6F9, 'M', u'ψ'), - (0x1D6FA, 'M', u'ω'), - (0x1D6FB, 'M', u'∇'), - (0x1D6FC, 'M', u'α'), - (0x1D6FD, 'M', u'β'), - (0x1D6FE, 'M', u'γ'), - (0x1D6FF, 'M', u'δ'), - (0x1D700, 'M', u'ε'), - (0x1D701, 'M', u'ζ'), - (0x1D702, 'M', u'η'), - (0x1D703, 'M', u'θ'), - 
(0x1D704, 'M', u'ι'), - (0x1D705, 'M', u'κ'), - (0x1D706, 'M', u'λ'), - (0x1D707, 'M', u'μ'), - (0x1D708, 'M', u'ν'), - (0x1D709, 'M', u'ξ'), - (0x1D70A, 'M', u'ο'), - (0x1D70B, 'M', u'π'), - (0x1D70C, 'M', u'ρ'), - (0x1D70D, 'M', u'σ'), - (0x1D70F, 'M', u'τ'), - (0x1D710, 'M', u'υ'), - (0x1D711, 'M', u'φ'), - (0x1D712, 'M', u'χ'), - (0x1D713, 'M', u'ψ'), - (0x1D714, 'M', u'ω'), - (0x1D715, 'M', u'∂'), - (0x1D716, 'M', u'ε'), - (0x1D717, 'M', u'θ'), - (0x1D718, 'M', u'κ'), - (0x1D719, 'M', u'φ'), - (0x1D71A, 'M', u'ρ'), - (0x1D71B, 'M', u'π'), - (0x1D71C, 'M', u'α'), - (0x1D71D, 'M', u'β'), - (0x1D71E, 'M', u'γ'), - (0x1D71F, 'M', u'δ'), - (0x1D720, 'M', u'ε'), - (0x1D721, 'M', u'ζ'), - ] - -def _seg_66(): - return [ - (0x1D722, 'M', u'η'), - (0x1D723, 'M', u'θ'), - (0x1D724, 'M', u'ι'), - (0x1D725, 'M', u'κ'), - (0x1D726, 'M', u'λ'), - (0x1D727, 'M', u'μ'), - (0x1D728, 'M', u'ν'), - (0x1D729, 'M', u'ξ'), - (0x1D72A, 'M', u'ο'), - (0x1D72B, 'M', u'π'), - (0x1D72C, 'M', u'ρ'), - (0x1D72D, 'M', u'θ'), - (0x1D72E, 'M', u'σ'), - (0x1D72F, 'M', u'τ'), - (0x1D730, 'M', u'υ'), - (0x1D731, 'M', u'φ'), - (0x1D732, 'M', u'χ'), - (0x1D733, 'M', u'ψ'), - (0x1D734, 'M', u'ω'), - (0x1D735, 'M', u'∇'), - (0x1D736, 'M', u'α'), - (0x1D737, 'M', u'β'), - (0x1D738, 'M', u'γ'), - (0x1D739, 'M', u'δ'), - (0x1D73A, 'M', u'ε'), - (0x1D73B, 'M', u'ζ'), - (0x1D73C, 'M', u'η'), - (0x1D73D, 'M', u'θ'), - (0x1D73E, 'M', u'ι'), - (0x1D73F, 'M', u'κ'), - (0x1D740, 'M', u'λ'), - (0x1D741, 'M', u'μ'), - (0x1D742, 'M', u'ν'), - (0x1D743, 'M', u'ξ'), - (0x1D744, 'M', u'ο'), - (0x1D745, 'M', u'π'), - (0x1D746, 'M', u'ρ'), - (0x1D747, 'M', u'σ'), - (0x1D749, 'M', u'τ'), - (0x1D74A, 'M', u'υ'), - (0x1D74B, 'M', u'φ'), - (0x1D74C, 'M', u'χ'), - (0x1D74D, 'M', u'ψ'), - (0x1D74E, 'M', u'ω'), - (0x1D74F, 'M', u'∂'), - (0x1D750, 'M', u'ε'), - (0x1D751, 'M', u'θ'), - (0x1D752, 'M', u'κ'), - (0x1D753, 'M', u'φ'), - (0x1D754, 'M', u'ρ'), - (0x1D755, 'M', u'π'), - (0x1D756, 'M', u'α'), - (0x1D757, 'M', u'β'), - (0x1D758, 'M', u'γ'), - (0x1D759, 'M', u'δ'), - (0x1D75A, 'M', u'ε'), - (0x1D75B, 'M', u'ζ'), - (0x1D75C, 'M', u'η'), - (0x1D75D, 'M', u'θ'), - (0x1D75E, 'M', u'ι'), - (0x1D75F, 'M', u'κ'), - (0x1D760, 'M', u'λ'), - (0x1D761, 'M', u'μ'), - (0x1D762, 'M', u'ν'), - (0x1D763, 'M', u'ξ'), - (0x1D764, 'M', u'ο'), - (0x1D765, 'M', u'π'), - (0x1D766, 'M', u'ρ'), - (0x1D767, 'M', u'θ'), - (0x1D768, 'M', u'σ'), - (0x1D769, 'M', u'τ'), - (0x1D76A, 'M', u'υ'), - (0x1D76B, 'M', u'φ'), - (0x1D76C, 'M', u'χ'), - (0x1D76D, 'M', u'ψ'), - (0x1D76E, 'M', u'ω'), - (0x1D76F, 'M', u'∇'), - (0x1D770, 'M', u'α'), - (0x1D771, 'M', u'β'), - (0x1D772, 'M', u'γ'), - (0x1D773, 'M', u'δ'), - (0x1D774, 'M', u'ε'), - (0x1D775, 'M', u'ζ'), - (0x1D776, 'M', u'η'), - (0x1D777, 'M', u'θ'), - (0x1D778, 'M', u'ι'), - (0x1D779, 'M', u'κ'), - (0x1D77A, 'M', u'λ'), - (0x1D77B, 'M', u'μ'), - (0x1D77C, 'M', u'ν'), - (0x1D77D, 'M', u'ξ'), - (0x1D77E, 'M', u'ο'), - (0x1D77F, 'M', u'π'), - (0x1D780, 'M', u'ρ'), - (0x1D781, 'M', u'σ'), - (0x1D783, 'M', u'τ'), - (0x1D784, 'M', u'υ'), - (0x1D785, 'M', u'φ'), - (0x1D786, 'M', u'χ'), - (0x1D787, 'M', u'ψ'), - ] - -def _seg_67(): - return [ - (0x1D788, 'M', u'ω'), - (0x1D789, 'M', u'∂'), - (0x1D78A, 'M', u'ε'), - (0x1D78B, 'M', u'θ'), - (0x1D78C, 'M', u'κ'), - (0x1D78D, 'M', u'φ'), - (0x1D78E, 'M', u'ρ'), - (0x1D78F, 'M', u'π'), - (0x1D790, 'M', u'α'), - (0x1D791, 'M', u'β'), - (0x1D792, 'M', u'γ'), - (0x1D793, 'M', u'δ'), - (0x1D794, 'M', u'ε'), - (0x1D795, 'M', u'ζ'), - (0x1D796, 'M', u'η'), - (0x1D797, 'M', u'θ'), - 
(0x1D798, 'M', u'ι'), - (0x1D799, 'M', u'κ'), - (0x1D79A, 'M', u'λ'), - (0x1D79B, 'M', u'μ'), - (0x1D79C, 'M', u'ν'), - (0x1D79D, 'M', u'ξ'), - (0x1D79E, 'M', u'ο'), - (0x1D79F, 'M', u'π'), - (0x1D7A0, 'M', u'ρ'), - (0x1D7A1, 'M', u'θ'), - (0x1D7A2, 'M', u'σ'), - (0x1D7A3, 'M', u'τ'), - (0x1D7A4, 'M', u'υ'), - (0x1D7A5, 'M', u'φ'), - (0x1D7A6, 'M', u'χ'), - (0x1D7A7, 'M', u'ψ'), - (0x1D7A8, 'M', u'ω'), - (0x1D7A9, 'M', u'∇'), - (0x1D7AA, 'M', u'α'), - (0x1D7AB, 'M', u'β'), - (0x1D7AC, 'M', u'γ'), - (0x1D7AD, 'M', u'δ'), - (0x1D7AE, 'M', u'ε'), - (0x1D7AF, 'M', u'ζ'), - (0x1D7B0, 'M', u'η'), - (0x1D7B1, 'M', u'θ'), - (0x1D7B2, 'M', u'ι'), - (0x1D7B3, 'M', u'κ'), - (0x1D7B4, 'M', u'λ'), - (0x1D7B5, 'M', u'μ'), - (0x1D7B6, 'M', u'ν'), - (0x1D7B7, 'M', u'ξ'), - (0x1D7B8, 'M', u'ο'), - (0x1D7B9, 'M', u'π'), - (0x1D7BA, 'M', u'ρ'), - (0x1D7BB, 'M', u'σ'), - (0x1D7BD, 'M', u'τ'), - (0x1D7BE, 'M', u'υ'), - (0x1D7BF, 'M', u'φ'), - (0x1D7C0, 'M', u'χ'), - (0x1D7C1, 'M', u'ψ'), - (0x1D7C2, 'M', u'ω'), - (0x1D7C3, 'M', u'∂'), - (0x1D7C4, 'M', u'ε'), - (0x1D7C5, 'M', u'θ'), - (0x1D7C6, 'M', u'κ'), - (0x1D7C7, 'M', u'φ'), - (0x1D7C8, 'M', u'ρ'), - (0x1D7C9, 'M', u'π'), - (0x1D7CA, 'M', u'ϝ'), - (0x1D7CC, 'X'), - (0x1D7CE, 'M', u'0'), - (0x1D7CF, 'M', u'1'), - (0x1D7D0, 'M', u'2'), - (0x1D7D1, 'M', u'3'), - (0x1D7D2, 'M', u'4'), - (0x1D7D3, 'M', u'5'), - (0x1D7D4, 'M', u'6'), - (0x1D7D5, 'M', u'7'), - (0x1D7D6, 'M', u'8'), - (0x1D7D7, 'M', u'9'), - (0x1D7D8, 'M', u'0'), - (0x1D7D9, 'M', u'1'), - (0x1D7DA, 'M', u'2'), - (0x1D7DB, 'M', u'3'), - (0x1D7DC, 'M', u'4'), - (0x1D7DD, 'M', u'5'), - (0x1D7DE, 'M', u'6'), - (0x1D7DF, 'M', u'7'), - (0x1D7E0, 'M', u'8'), - (0x1D7E1, 'M', u'9'), - (0x1D7E2, 'M', u'0'), - (0x1D7E3, 'M', u'1'), - (0x1D7E4, 'M', u'2'), - (0x1D7E5, 'M', u'3'), - (0x1D7E6, 'M', u'4'), - (0x1D7E7, 'M', u'5'), - (0x1D7E8, 'M', u'6'), - (0x1D7E9, 'M', u'7'), - (0x1D7EA, 'M', u'8'), - (0x1D7EB, 'M', u'9'), - (0x1D7EC, 'M', u'0'), - (0x1D7ED, 'M', u'1'), - (0x1D7EE, 'M', u'2'), - ] - -def _seg_68(): - return [ - (0x1D7EF, 'M', u'3'), - (0x1D7F0, 'M', u'4'), - (0x1D7F1, 'M', u'5'), - (0x1D7F2, 'M', u'6'), - (0x1D7F3, 'M', u'7'), - (0x1D7F4, 'M', u'8'), - (0x1D7F5, 'M', u'9'), - (0x1D7F6, 'M', u'0'), - (0x1D7F7, 'M', u'1'), - (0x1D7F8, 'M', u'2'), - (0x1D7F9, 'M', u'3'), - (0x1D7FA, 'M', u'4'), - (0x1D7FB, 'M', u'5'), - (0x1D7FC, 'M', u'6'), - (0x1D7FD, 'M', u'7'), - (0x1D7FE, 'M', u'8'), - (0x1D7FF, 'M', u'9'), - (0x1D800, 'V'), - (0x1DA8C, 'X'), - (0x1DA9B, 'V'), - (0x1DAA0, 'X'), - (0x1DAA1, 'V'), - (0x1DAB0, 'X'), - (0x1E000, 'V'), - (0x1E007, 'X'), - (0x1E008, 'V'), - (0x1E019, 'X'), - (0x1E01B, 'V'), - (0x1E022, 'X'), - (0x1E023, 'V'), - (0x1E025, 'X'), - (0x1E026, 'V'), - (0x1E02B, 'X'), - (0x1E800, 'V'), - (0x1E8C5, 'X'), - (0x1E8C7, 'V'), - (0x1E8D7, 'X'), - (0x1E900, 'M', u'𞤢'), - (0x1E901, 'M', u'𞤣'), - (0x1E902, 'M', u'𞤤'), - (0x1E903, 'M', u'𞤥'), - (0x1E904, 'M', u'𞤦'), - (0x1E905, 'M', u'𞤧'), - (0x1E906, 'M', u'𞤨'), - (0x1E907, 'M', u'𞤩'), - (0x1E908, 'M', u'𞤪'), - (0x1E909, 'M', u'𞤫'), - (0x1E90A, 'M', u'𞤬'), - (0x1E90B, 'M', u'𞤭'), - (0x1E90C, 'M', u'𞤮'), - (0x1E90D, 'M', u'𞤯'), - (0x1E90E, 'M', u'𞤰'), - (0x1E90F, 'M', u'𞤱'), - (0x1E910, 'M', u'𞤲'), - (0x1E911, 'M', u'𞤳'), - (0x1E912, 'M', u'𞤴'), - (0x1E913, 'M', u'𞤵'), - (0x1E914, 'M', u'𞤶'), - (0x1E915, 'M', u'𞤷'), - (0x1E916, 'M', u'𞤸'), - (0x1E917, 'M', u'𞤹'), - (0x1E918, 'M', u'𞤺'), - (0x1E919, 'M', u'𞤻'), - (0x1E91A, 'M', u'𞤼'), - (0x1E91B, 'M', u'𞤽'), - (0x1E91C, 'M', u'𞤾'), - (0x1E91D, 'M', u'𞤿'), - (0x1E91E, 'M', u'𞥀'), - 
(0x1E91F, 'M', u'𞥁'), - (0x1E920, 'M', u'𞥂'), - (0x1E921, 'M', u'𞥃'), - (0x1E922, 'V'), - (0x1E94B, 'X'), - (0x1E950, 'V'), - (0x1E95A, 'X'), - (0x1E95E, 'V'), - (0x1E960, 'X'), - (0x1EC71, 'V'), - (0x1ECB5, 'X'), - (0x1EE00, 'M', u'ا'), - (0x1EE01, 'M', u'ب'), - (0x1EE02, 'M', u'ج'), - (0x1EE03, 'M', u'د'), - (0x1EE04, 'X'), - (0x1EE05, 'M', u'و'), - (0x1EE06, 'M', u'ز'), - (0x1EE07, 'M', u'ح'), - (0x1EE08, 'M', u'ط'), - (0x1EE09, 'M', u'ي'), - (0x1EE0A, 'M', u'ك'), - (0x1EE0B, 'M', u'ل'), - (0x1EE0C, 'M', u'م'), - (0x1EE0D, 'M', u'ن'), - (0x1EE0E, 'M', u'س'), - (0x1EE0F, 'M', u'ع'), - (0x1EE10, 'M', u'ف'), - (0x1EE11, 'M', u'ص'), - (0x1EE12, 'M', u'ق'), - (0x1EE13, 'M', u'ر'), - (0x1EE14, 'M', u'ش'), - ] - -def _seg_69(): - return [ - (0x1EE15, 'M', u'ت'), - (0x1EE16, 'M', u'ث'), - (0x1EE17, 'M', u'خ'), - (0x1EE18, 'M', u'ذ'), - (0x1EE19, 'M', u'ض'), - (0x1EE1A, 'M', u'ظ'), - (0x1EE1B, 'M', u'غ'), - (0x1EE1C, 'M', u'ٮ'), - (0x1EE1D, 'M', u'ں'), - (0x1EE1E, 'M', u'ڡ'), - (0x1EE1F, 'M', u'ٯ'), - (0x1EE20, 'X'), - (0x1EE21, 'M', u'ب'), - (0x1EE22, 'M', u'ج'), - (0x1EE23, 'X'), - (0x1EE24, 'M', u'ه'), - (0x1EE25, 'X'), - (0x1EE27, 'M', u'ح'), - (0x1EE28, 'X'), - (0x1EE29, 'M', u'ي'), - (0x1EE2A, 'M', u'ك'), - (0x1EE2B, 'M', u'ل'), - (0x1EE2C, 'M', u'م'), - (0x1EE2D, 'M', u'ن'), - (0x1EE2E, 'M', u'س'), - (0x1EE2F, 'M', u'ع'), - (0x1EE30, 'M', u'ف'), - (0x1EE31, 'M', u'ص'), - (0x1EE32, 'M', u'ق'), - (0x1EE33, 'X'), - (0x1EE34, 'M', u'ش'), - (0x1EE35, 'M', u'ت'), - (0x1EE36, 'M', u'ث'), - (0x1EE37, 'M', u'خ'), - (0x1EE38, 'X'), - (0x1EE39, 'M', u'ض'), - (0x1EE3A, 'X'), - (0x1EE3B, 'M', u'غ'), - (0x1EE3C, 'X'), - (0x1EE42, 'M', u'ج'), - (0x1EE43, 'X'), - (0x1EE47, 'M', u'ح'), - (0x1EE48, 'X'), - (0x1EE49, 'M', u'ي'), - (0x1EE4A, 'X'), - (0x1EE4B, 'M', u'ل'), - (0x1EE4C, 'X'), - (0x1EE4D, 'M', u'ن'), - (0x1EE4E, 'M', u'س'), - (0x1EE4F, 'M', u'ع'), - (0x1EE50, 'X'), - (0x1EE51, 'M', u'ص'), - (0x1EE52, 'M', u'ق'), - (0x1EE53, 'X'), - (0x1EE54, 'M', u'ش'), - (0x1EE55, 'X'), - (0x1EE57, 'M', u'خ'), - (0x1EE58, 'X'), - (0x1EE59, 'M', u'ض'), - (0x1EE5A, 'X'), - (0x1EE5B, 'M', u'غ'), - (0x1EE5C, 'X'), - (0x1EE5D, 'M', u'ں'), - (0x1EE5E, 'X'), - (0x1EE5F, 'M', u'ٯ'), - (0x1EE60, 'X'), - (0x1EE61, 'M', u'ب'), - (0x1EE62, 'M', u'ج'), - (0x1EE63, 'X'), - (0x1EE64, 'M', u'ه'), - (0x1EE65, 'X'), - (0x1EE67, 'M', u'ح'), - (0x1EE68, 'M', u'ط'), - (0x1EE69, 'M', u'ي'), - (0x1EE6A, 'M', u'ك'), - (0x1EE6B, 'X'), - (0x1EE6C, 'M', u'م'), - (0x1EE6D, 'M', u'ن'), - (0x1EE6E, 'M', u'س'), - (0x1EE6F, 'M', u'ع'), - (0x1EE70, 'M', u'ف'), - (0x1EE71, 'M', u'ص'), - (0x1EE72, 'M', u'ق'), - (0x1EE73, 'X'), - (0x1EE74, 'M', u'ش'), - (0x1EE75, 'M', u'ت'), - (0x1EE76, 'M', u'ث'), - (0x1EE77, 'M', u'خ'), - (0x1EE78, 'X'), - (0x1EE79, 'M', u'ض'), - (0x1EE7A, 'M', u'ظ'), - (0x1EE7B, 'M', u'غ'), - (0x1EE7C, 'M', u'ٮ'), - (0x1EE7D, 'X'), - (0x1EE7E, 'M', u'ڡ'), - (0x1EE7F, 'X'), - (0x1EE80, 'M', u'ا'), - (0x1EE81, 'M', u'ب'), - (0x1EE82, 'M', u'ج'), - (0x1EE83, 'M', u'د'), - ] - -def _seg_70(): - return [ - (0x1EE84, 'M', u'ه'), - (0x1EE85, 'M', u'و'), - (0x1EE86, 'M', u'ز'), - (0x1EE87, 'M', u'ح'), - (0x1EE88, 'M', u'ط'), - (0x1EE89, 'M', u'ي'), - (0x1EE8A, 'X'), - (0x1EE8B, 'M', u'ل'), - (0x1EE8C, 'M', u'م'), - (0x1EE8D, 'M', u'ن'), - (0x1EE8E, 'M', u'س'), - (0x1EE8F, 'M', u'ع'), - (0x1EE90, 'M', u'ف'), - (0x1EE91, 'M', u'ص'), - (0x1EE92, 'M', u'ق'), - (0x1EE93, 'M', u'ر'), - (0x1EE94, 'M', u'ش'), - (0x1EE95, 'M', u'ت'), - (0x1EE96, 'M', u'ث'), - (0x1EE97, 'M', u'خ'), - (0x1EE98, 'M', u'ذ'), - (0x1EE99, 'M', u'ض'), - (0x1EE9A, 'M', 
u'ظ'), - (0x1EE9B, 'M', u'غ'), - (0x1EE9C, 'X'), - (0x1EEA1, 'M', u'ب'), - (0x1EEA2, 'M', u'ج'), - (0x1EEA3, 'M', u'د'), - (0x1EEA4, 'X'), - (0x1EEA5, 'M', u'و'), - (0x1EEA6, 'M', u'ز'), - (0x1EEA7, 'M', u'ح'), - (0x1EEA8, 'M', u'ط'), - (0x1EEA9, 'M', u'ي'), - (0x1EEAA, 'X'), - (0x1EEAB, 'M', u'ل'), - (0x1EEAC, 'M', u'م'), - (0x1EEAD, 'M', u'ن'), - (0x1EEAE, 'M', u'س'), - (0x1EEAF, 'M', u'ع'), - (0x1EEB0, 'M', u'ف'), - (0x1EEB1, 'M', u'ص'), - (0x1EEB2, 'M', u'ق'), - (0x1EEB3, 'M', u'ر'), - (0x1EEB4, 'M', u'ش'), - (0x1EEB5, 'M', u'ت'), - (0x1EEB6, 'M', u'ث'), - (0x1EEB7, 'M', u'خ'), - (0x1EEB8, 'M', u'ذ'), - (0x1EEB9, 'M', u'ض'), - (0x1EEBA, 'M', u'ظ'), - (0x1EEBB, 'M', u'غ'), - (0x1EEBC, 'X'), - (0x1EEF0, 'V'), - (0x1EEF2, 'X'), - (0x1F000, 'V'), - (0x1F02C, 'X'), - (0x1F030, 'V'), - (0x1F094, 'X'), - (0x1F0A0, 'V'), - (0x1F0AF, 'X'), - (0x1F0B1, 'V'), - (0x1F0C0, 'X'), - (0x1F0C1, 'V'), - (0x1F0D0, 'X'), - (0x1F0D1, 'V'), - (0x1F0F6, 'X'), - (0x1F101, '3', u'0,'), - (0x1F102, '3', u'1,'), - (0x1F103, '3', u'2,'), - (0x1F104, '3', u'3,'), - (0x1F105, '3', u'4,'), - (0x1F106, '3', u'5,'), - (0x1F107, '3', u'6,'), - (0x1F108, '3', u'7,'), - (0x1F109, '3', u'8,'), - (0x1F10A, '3', u'9,'), - (0x1F10B, 'V'), - (0x1F10D, 'X'), - (0x1F110, '3', u'(a)'), - (0x1F111, '3', u'(b)'), - (0x1F112, '3', u'(c)'), - (0x1F113, '3', u'(d)'), - (0x1F114, '3', u'(e)'), - (0x1F115, '3', u'(f)'), - (0x1F116, '3', u'(g)'), - (0x1F117, '3', u'(h)'), - (0x1F118, '3', u'(i)'), - (0x1F119, '3', u'(j)'), - (0x1F11A, '3', u'(k)'), - (0x1F11B, '3', u'(l)'), - (0x1F11C, '3', u'(m)'), - (0x1F11D, '3', u'(n)'), - (0x1F11E, '3', u'(o)'), - (0x1F11F, '3', u'(p)'), - (0x1F120, '3', u'(q)'), - (0x1F121, '3', u'(r)'), - (0x1F122, '3', u'(s)'), - (0x1F123, '3', u'(t)'), - (0x1F124, '3', u'(u)'), - ] - -def _seg_71(): - return [ - (0x1F125, '3', u'(v)'), - (0x1F126, '3', u'(w)'), - (0x1F127, '3', u'(x)'), - (0x1F128, '3', u'(y)'), - (0x1F129, '3', u'(z)'), - (0x1F12A, 'M', u'〔s〕'), - (0x1F12B, 'M', u'c'), - (0x1F12C, 'M', u'r'), - (0x1F12D, 'M', u'cd'), - (0x1F12E, 'M', u'wz'), - (0x1F12F, 'V'), - (0x1F130, 'M', u'a'), - (0x1F131, 'M', u'b'), - (0x1F132, 'M', u'c'), - (0x1F133, 'M', u'd'), - (0x1F134, 'M', u'e'), - (0x1F135, 'M', u'f'), - (0x1F136, 'M', u'g'), - (0x1F137, 'M', u'h'), - (0x1F138, 'M', u'i'), - (0x1F139, 'M', u'j'), - (0x1F13A, 'M', u'k'), - (0x1F13B, 'M', u'l'), - (0x1F13C, 'M', u'm'), - (0x1F13D, 'M', u'n'), - (0x1F13E, 'M', u'o'), - (0x1F13F, 'M', u'p'), - (0x1F140, 'M', u'q'), - (0x1F141, 'M', u'r'), - (0x1F142, 'M', u's'), - (0x1F143, 'M', u't'), - (0x1F144, 'M', u'u'), - (0x1F145, 'M', u'v'), - (0x1F146, 'M', u'w'), - (0x1F147, 'M', u'x'), - (0x1F148, 'M', u'y'), - (0x1F149, 'M', u'z'), - (0x1F14A, 'M', u'hv'), - (0x1F14B, 'M', u'mv'), - (0x1F14C, 'M', u'sd'), - (0x1F14D, 'M', u'ss'), - (0x1F14E, 'M', u'ppv'), - (0x1F14F, 'M', u'wc'), - (0x1F150, 'V'), - (0x1F16A, 'M', u'mc'), - (0x1F16B, 'M', u'md'), - (0x1F16C, 'X'), - (0x1F170, 'V'), - (0x1F190, 'M', u'dj'), - (0x1F191, 'V'), - (0x1F1AD, 'X'), - (0x1F1E6, 'V'), - (0x1F200, 'M', u'ほか'), - (0x1F201, 'M', u'ココ'), - (0x1F202, 'M', u'サ'), - (0x1F203, 'X'), - (0x1F210, 'M', u'手'), - (0x1F211, 'M', u'字'), - (0x1F212, 'M', u'双'), - (0x1F213, 'M', u'デ'), - (0x1F214, 'M', u'二'), - (0x1F215, 'M', u'多'), - (0x1F216, 'M', u'解'), - (0x1F217, 'M', u'天'), - (0x1F218, 'M', u'交'), - (0x1F219, 'M', u'映'), - (0x1F21A, 'M', u'無'), - (0x1F21B, 'M', u'料'), - (0x1F21C, 'M', u'前'), - (0x1F21D, 'M', u'後'), - (0x1F21E, 'M', u'再'), - (0x1F21F, 'M', u'新'), - (0x1F220, 'M', u'初'), - 
(0x1F221, 'M', u'終'), - (0x1F222, 'M', u'生'), - (0x1F223, 'M', u'販'), - (0x1F224, 'M', u'声'), - (0x1F225, 'M', u'吹'), - (0x1F226, 'M', u'演'), - (0x1F227, 'M', u'投'), - (0x1F228, 'M', u'捕'), - (0x1F229, 'M', u'一'), - (0x1F22A, 'M', u'三'), - (0x1F22B, 'M', u'遊'), - (0x1F22C, 'M', u'左'), - (0x1F22D, 'M', u'中'), - (0x1F22E, 'M', u'右'), - (0x1F22F, 'M', u'指'), - (0x1F230, 'M', u'走'), - (0x1F231, 'M', u'打'), - (0x1F232, 'M', u'禁'), - (0x1F233, 'M', u'空'), - (0x1F234, 'M', u'合'), - (0x1F235, 'M', u'満'), - (0x1F236, 'M', u'有'), - (0x1F237, 'M', u'月'), - (0x1F238, 'M', u'申'), - (0x1F239, 'M', u'割'), - (0x1F23A, 'M', u'営'), - (0x1F23B, 'M', u'配'), - ] - -def _seg_72(): - return [ - (0x1F23C, 'X'), - (0x1F240, 'M', u'〔本〕'), - (0x1F241, 'M', u'〔三〕'), - (0x1F242, 'M', u'〔二〕'), - (0x1F243, 'M', u'〔安〕'), - (0x1F244, 'M', u'〔点〕'), - (0x1F245, 'M', u'〔打〕'), - (0x1F246, 'M', u'〔盗〕'), - (0x1F247, 'M', u'〔勝〕'), - (0x1F248, 'M', u'〔敗〕'), - (0x1F249, 'X'), - (0x1F250, 'M', u'得'), - (0x1F251, 'M', u'可'), - (0x1F252, 'X'), - (0x1F260, 'V'), - (0x1F266, 'X'), - (0x1F300, 'V'), - (0x1F6D5, 'X'), - (0x1F6E0, 'V'), - (0x1F6ED, 'X'), - (0x1F6F0, 'V'), - (0x1F6FA, 'X'), - (0x1F700, 'V'), - (0x1F774, 'X'), - (0x1F780, 'V'), - (0x1F7D9, 'X'), - (0x1F800, 'V'), - (0x1F80C, 'X'), - (0x1F810, 'V'), - (0x1F848, 'X'), - (0x1F850, 'V'), - (0x1F85A, 'X'), - (0x1F860, 'V'), - (0x1F888, 'X'), - (0x1F890, 'V'), - (0x1F8AE, 'X'), - (0x1F900, 'V'), - (0x1F90C, 'X'), - (0x1F910, 'V'), - (0x1F93F, 'X'), - (0x1F940, 'V'), - (0x1F971, 'X'), - (0x1F973, 'V'), - (0x1F977, 'X'), - (0x1F97A, 'V'), - (0x1F97B, 'X'), - (0x1F97C, 'V'), - (0x1F9A3, 'X'), - (0x1F9B0, 'V'), - (0x1F9BA, 'X'), - (0x1F9C0, 'V'), - (0x1F9C3, 'X'), - (0x1F9D0, 'V'), - (0x1FA00, 'X'), - (0x1FA60, 'V'), - (0x1FA6E, 'X'), - (0x20000, 'V'), - (0x2A6D7, 'X'), - (0x2A700, 'V'), - (0x2B735, 'X'), - (0x2B740, 'V'), - (0x2B81E, 'X'), - (0x2B820, 'V'), - (0x2CEA2, 'X'), - (0x2CEB0, 'V'), - (0x2EBE1, 'X'), - (0x2F800, 'M', u'丽'), - (0x2F801, 'M', u'丸'), - (0x2F802, 'M', u'乁'), - (0x2F803, 'M', u'𠄢'), - (0x2F804, 'M', u'你'), - (0x2F805, 'M', u'侮'), - (0x2F806, 'M', u'侻'), - (0x2F807, 'M', u'倂'), - (0x2F808, 'M', u'偺'), - (0x2F809, 'M', u'備'), - (0x2F80A, 'M', u'僧'), - (0x2F80B, 'M', u'像'), - (0x2F80C, 'M', u'㒞'), - (0x2F80D, 'M', u'𠘺'), - (0x2F80E, 'M', u'免'), - (0x2F80F, 'M', u'兔'), - (0x2F810, 'M', u'兤'), - (0x2F811, 'M', u'具'), - (0x2F812, 'M', u'𠔜'), - (0x2F813, 'M', u'㒹'), - (0x2F814, 'M', u'內'), - (0x2F815, 'M', u'再'), - (0x2F816, 'M', u'𠕋'), - (0x2F817, 'M', u'冗'), - (0x2F818, 'M', u'冤'), - (0x2F819, 'M', u'仌'), - (0x2F81A, 'M', u'冬'), - (0x2F81B, 'M', u'况'), - (0x2F81C, 'M', u'𩇟'), - (0x2F81D, 'M', u'凵'), - (0x2F81E, 'M', u'刃'), - (0x2F81F, 'M', u'㓟'), - (0x2F820, 'M', u'刻'), - (0x2F821, 'M', u'剆'), - ] - -def _seg_73(): - return [ - (0x2F822, 'M', u'割'), - (0x2F823, 'M', u'剷'), - (0x2F824, 'M', u'㔕'), - (0x2F825, 'M', u'勇'), - (0x2F826, 'M', u'勉'), - (0x2F827, 'M', u'勤'), - (0x2F828, 'M', u'勺'), - (0x2F829, 'M', u'包'), - (0x2F82A, 'M', u'匆'), - (0x2F82B, 'M', u'北'), - (0x2F82C, 'M', u'卉'), - (0x2F82D, 'M', u'卑'), - (0x2F82E, 'M', u'博'), - (0x2F82F, 'M', u'即'), - (0x2F830, 'M', u'卽'), - (0x2F831, 'M', u'卿'), - (0x2F834, 'M', u'𠨬'), - (0x2F835, 'M', u'灰'), - (0x2F836, 'M', u'及'), - (0x2F837, 'M', u'叟'), - (0x2F838, 'M', u'𠭣'), - (0x2F839, 'M', u'叫'), - (0x2F83A, 'M', u'叱'), - (0x2F83B, 'M', u'吆'), - (0x2F83C, 'M', u'咞'), - (0x2F83D, 'M', u'吸'), - (0x2F83E, 'M', u'呈'), - (0x2F83F, 'M', u'周'), - (0x2F840, 'M', u'咢'), - (0x2F841, 'M', u'哶'), - (0x2F842, 'M', u'唐'), - 
(0x2F843, 'M', u'啓'), - (0x2F844, 'M', u'啣'), - (0x2F845, 'M', u'善'), - (0x2F847, 'M', u'喙'), - (0x2F848, 'M', u'喫'), - (0x2F849, 'M', u'喳'), - (0x2F84A, 'M', u'嗂'), - (0x2F84B, 'M', u'圖'), - (0x2F84C, 'M', u'嘆'), - (0x2F84D, 'M', u'圗'), - (0x2F84E, 'M', u'噑'), - (0x2F84F, 'M', u'噴'), - (0x2F850, 'M', u'切'), - (0x2F851, 'M', u'壮'), - (0x2F852, 'M', u'城'), - (0x2F853, 'M', u'埴'), - (0x2F854, 'M', u'堍'), - (0x2F855, 'M', u'型'), - (0x2F856, 'M', u'堲'), - (0x2F857, 'M', u'報'), - (0x2F858, 'M', u'墬'), - (0x2F859, 'M', u'𡓤'), - (0x2F85A, 'M', u'売'), - (0x2F85B, 'M', u'壷'), - (0x2F85C, 'M', u'夆'), - (0x2F85D, 'M', u'多'), - (0x2F85E, 'M', u'夢'), - (0x2F85F, 'M', u'奢'), - (0x2F860, 'M', u'𡚨'), - (0x2F861, 'M', u'𡛪'), - (0x2F862, 'M', u'姬'), - (0x2F863, 'M', u'娛'), - (0x2F864, 'M', u'娧'), - (0x2F865, 'M', u'姘'), - (0x2F866, 'M', u'婦'), - (0x2F867, 'M', u'㛮'), - (0x2F868, 'X'), - (0x2F869, 'M', u'嬈'), - (0x2F86A, 'M', u'嬾'), - (0x2F86C, 'M', u'𡧈'), - (0x2F86D, 'M', u'寃'), - (0x2F86E, 'M', u'寘'), - (0x2F86F, 'M', u'寧'), - (0x2F870, 'M', u'寳'), - (0x2F871, 'M', u'𡬘'), - (0x2F872, 'M', u'寿'), - (0x2F873, 'M', u'将'), - (0x2F874, 'X'), - (0x2F875, 'M', u'尢'), - (0x2F876, 'M', u'㞁'), - (0x2F877, 'M', u'屠'), - (0x2F878, 'M', u'屮'), - (0x2F879, 'M', u'峀'), - (0x2F87A, 'M', u'岍'), - (0x2F87B, 'M', u'𡷤'), - (0x2F87C, 'M', u'嵃'), - (0x2F87D, 'M', u'𡷦'), - (0x2F87E, 'M', u'嵮'), - (0x2F87F, 'M', u'嵫'), - (0x2F880, 'M', u'嵼'), - (0x2F881, 'M', u'巡'), - (0x2F882, 'M', u'巢'), - (0x2F883, 'M', u'㠯'), - (0x2F884, 'M', u'巽'), - (0x2F885, 'M', u'帨'), - (0x2F886, 'M', u'帽'), - (0x2F887, 'M', u'幩'), - (0x2F888, 'M', u'㡢'), - (0x2F889, 'M', u'𢆃'), - ] - -def _seg_74(): - return [ - (0x2F88A, 'M', u'㡼'), - (0x2F88B, 'M', u'庰'), - (0x2F88C, 'M', u'庳'), - (0x2F88D, 'M', u'庶'), - (0x2F88E, 'M', u'廊'), - (0x2F88F, 'M', u'𪎒'), - (0x2F890, 'M', u'廾'), - (0x2F891, 'M', u'𢌱'), - (0x2F893, 'M', u'舁'), - (0x2F894, 'M', u'弢'), - (0x2F896, 'M', u'㣇'), - (0x2F897, 'M', u'𣊸'), - (0x2F898, 'M', u'𦇚'), - (0x2F899, 'M', u'形'), - (0x2F89A, 'M', u'彫'), - (0x2F89B, 'M', u'㣣'), - (0x2F89C, 'M', u'徚'), - (0x2F89D, 'M', u'忍'), - (0x2F89E, 'M', u'志'), - (0x2F89F, 'M', u'忹'), - (0x2F8A0, 'M', u'悁'), - (0x2F8A1, 'M', u'㤺'), - (0x2F8A2, 'M', u'㤜'), - (0x2F8A3, 'M', u'悔'), - (0x2F8A4, 'M', u'𢛔'), - (0x2F8A5, 'M', u'惇'), - (0x2F8A6, 'M', u'慈'), - (0x2F8A7, 'M', u'慌'), - (0x2F8A8, 'M', u'慎'), - (0x2F8A9, 'M', u'慌'), - (0x2F8AA, 'M', u'慺'), - (0x2F8AB, 'M', u'憎'), - (0x2F8AC, 'M', u'憲'), - (0x2F8AD, 'M', u'憤'), - (0x2F8AE, 'M', u'憯'), - (0x2F8AF, 'M', u'懞'), - (0x2F8B0, 'M', u'懲'), - (0x2F8B1, 'M', u'懶'), - (0x2F8B2, 'M', u'成'), - (0x2F8B3, 'M', u'戛'), - (0x2F8B4, 'M', u'扝'), - (0x2F8B5, 'M', u'抱'), - (0x2F8B6, 'M', u'拔'), - (0x2F8B7, 'M', u'捐'), - (0x2F8B8, 'M', u'𢬌'), - (0x2F8B9, 'M', u'挽'), - (0x2F8BA, 'M', u'拼'), - (0x2F8BB, 'M', u'捨'), - (0x2F8BC, 'M', u'掃'), - (0x2F8BD, 'M', u'揤'), - (0x2F8BE, 'M', u'𢯱'), - (0x2F8BF, 'M', u'搢'), - (0x2F8C0, 'M', u'揅'), - (0x2F8C1, 'M', u'掩'), - (0x2F8C2, 'M', u'㨮'), - (0x2F8C3, 'M', u'摩'), - (0x2F8C4, 'M', u'摾'), - (0x2F8C5, 'M', u'撝'), - (0x2F8C6, 'M', u'摷'), - (0x2F8C7, 'M', u'㩬'), - (0x2F8C8, 'M', u'敏'), - (0x2F8C9, 'M', u'敬'), - (0x2F8CA, 'M', u'𣀊'), - (0x2F8CB, 'M', u'旣'), - (0x2F8CC, 'M', u'書'), - (0x2F8CD, 'M', u'晉'), - (0x2F8CE, 'M', u'㬙'), - (0x2F8CF, 'M', u'暑'), - (0x2F8D0, 'M', u'㬈'), - (0x2F8D1, 'M', u'㫤'), - (0x2F8D2, 'M', u'冒'), - (0x2F8D3, 'M', u'冕'), - (0x2F8D4, 'M', u'最'), - (0x2F8D5, 'M', u'暜'), - (0x2F8D6, 'M', u'肭'), - (0x2F8D7, 'M', u'䏙'), - (0x2F8D8, 'M', u'朗'), - (0x2F8D9, 'M', u'望'), - 
(0x2F8DA, 'M', u'朡'), - (0x2F8DB, 'M', u'杞'), - (0x2F8DC, 'M', u'杓'), - (0x2F8DD, 'M', u'𣏃'), - (0x2F8DE, 'M', u'㭉'), - (0x2F8DF, 'M', u'柺'), - (0x2F8E0, 'M', u'枅'), - (0x2F8E1, 'M', u'桒'), - (0x2F8E2, 'M', u'梅'), - (0x2F8E3, 'M', u'𣑭'), - (0x2F8E4, 'M', u'梎'), - (0x2F8E5, 'M', u'栟'), - (0x2F8E6, 'M', u'椔'), - (0x2F8E7, 'M', u'㮝'), - (0x2F8E8, 'M', u'楂'), - (0x2F8E9, 'M', u'榣'), - (0x2F8EA, 'M', u'槪'), - (0x2F8EB, 'M', u'檨'), - (0x2F8EC, 'M', u'𣚣'), - (0x2F8ED, 'M', u'櫛'), - (0x2F8EE, 'M', u'㰘'), - (0x2F8EF, 'M', u'次'), - ] - -def _seg_75(): - return [ - (0x2F8F0, 'M', u'𣢧'), - (0x2F8F1, 'M', u'歔'), - (0x2F8F2, 'M', u'㱎'), - (0x2F8F3, 'M', u'歲'), - (0x2F8F4, 'M', u'殟'), - (0x2F8F5, 'M', u'殺'), - (0x2F8F6, 'M', u'殻'), - (0x2F8F7, 'M', u'𣪍'), - (0x2F8F8, 'M', u'𡴋'), - (0x2F8F9, 'M', u'𣫺'), - (0x2F8FA, 'M', u'汎'), - (0x2F8FB, 'M', u'𣲼'), - (0x2F8FC, 'M', u'沿'), - (0x2F8FD, 'M', u'泍'), - (0x2F8FE, 'M', u'汧'), - (0x2F8FF, 'M', u'洖'), - (0x2F900, 'M', u'派'), - (0x2F901, 'M', u'海'), - (0x2F902, 'M', u'流'), - (0x2F903, 'M', u'浩'), - (0x2F904, 'M', u'浸'), - (0x2F905, 'M', u'涅'), - (0x2F906, 'M', u'𣴞'), - (0x2F907, 'M', u'洴'), - (0x2F908, 'M', u'港'), - (0x2F909, 'M', u'湮'), - (0x2F90A, 'M', u'㴳'), - (0x2F90B, 'M', u'滋'), - (0x2F90C, 'M', u'滇'), - (0x2F90D, 'M', u'𣻑'), - (0x2F90E, 'M', u'淹'), - (0x2F90F, 'M', u'潮'), - (0x2F910, 'M', u'𣽞'), - (0x2F911, 'M', u'𣾎'), - (0x2F912, 'M', u'濆'), - (0x2F913, 'M', u'瀹'), - (0x2F914, 'M', u'瀞'), - (0x2F915, 'M', u'瀛'), - (0x2F916, 'M', u'㶖'), - (0x2F917, 'M', u'灊'), - (0x2F918, 'M', u'災'), - (0x2F919, 'M', u'灷'), - (0x2F91A, 'M', u'炭'), - (0x2F91B, 'M', u'𠔥'), - (0x2F91C, 'M', u'煅'), - (0x2F91D, 'M', u'𤉣'), - (0x2F91E, 'M', u'熜'), - (0x2F91F, 'X'), - (0x2F920, 'M', u'爨'), - (0x2F921, 'M', u'爵'), - (0x2F922, 'M', u'牐'), - (0x2F923, 'M', u'𤘈'), - (0x2F924, 'M', u'犀'), - (0x2F925, 'M', u'犕'), - (0x2F926, 'M', u'𤜵'), - (0x2F927, 'M', u'𤠔'), - (0x2F928, 'M', u'獺'), - (0x2F929, 'M', u'王'), - (0x2F92A, 'M', u'㺬'), - (0x2F92B, 'M', u'玥'), - (0x2F92C, 'M', u'㺸'), - (0x2F92E, 'M', u'瑇'), - (0x2F92F, 'M', u'瑜'), - (0x2F930, 'M', u'瑱'), - (0x2F931, 'M', u'璅'), - (0x2F932, 'M', u'瓊'), - (0x2F933, 'M', u'㼛'), - (0x2F934, 'M', u'甤'), - (0x2F935, 'M', u'𤰶'), - (0x2F936, 'M', u'甾'), - (0x2F937, 'M', u'𤲒'), - (0x2F938, 'M', u'異'), - (0x2F939, 'M', u'𢆟'), - (0x2F93A, 'M', u'瘐'), - (0x2F93B, 'M', u'𤾡'), - (0x2F93C, 'M', u'𤾸'), - (0x2F93D, 'M', u'𥁄'), - (0x2F93E, 'M', u'㿼'), - (0x2F93F, 'M', u'䀈'), - (0x2F940, 'M', u'直'), - (0x2F941, 'M', u'𥃳'), - (0x2F942, 'M', u'𥃲'), - (0x2F943, 'M', u'𥄙'), - (0x2F944, 'M', u'𥄳'), - (0x2F945, 'M', u'眞'), - (0x2F946, 'M', u'真'), - (0x2F948, 'M', u'睊'), - (0x2F949, 'M', u'䀹'), - (0x2F94A, 'M', u'瞋'), - (0x2F94B, 'M', u'䁆'), - (0x2F94C, 'M', u'䂖'), - (0x2F94D, 'M', u'𥐝'), - (0x2F94E, 'M', u'硎'), - (0x2F94F, 'M', u'碌'), - (0x2F950, 'M', u'磌'), - (0x2F951, 'M', u'䃣'), - (0x2F952, 'M', u'𥘦'), - (0x2F953, 'M', u'祖'), - (0x2F954, 'M', u'𥚚'), - (0x2F955, 'M', u'𥛅'), - ] - -def _seg_76(): - return [ - (0x2F956, 'M', u'福'), - (0x2F957, 'M', u'秫'), - (0x2F958, 'M', u'䄯'), - (0x2F959, 'M', u'穀'), - (0x2F95A, 'M', u'穊'), - (0x2F95B, 'M', u'穏'), - (0x2F95C, 'M', u'𥥼'), - (0x2F95D, 'M', u'𥪧'), - (0x2F95F, 'X'), - (0x2F960, 'M', u'䈂'), - (0x2F961, 'M', u'𥮫'), - (0x2F962, 'M', u'篆'), - (0x2F963, 'M', u'築'), - (0x2F964, 'M', u'䈧'), - (0x2F965, 'M', u'𥲀'), - (0x2F966, 'M', u'糒'), - (0x2F967, 'M', u'䊠'), - (0x2F968, 'M', u'糨'), - (0x2F969, 'M', u'糣'), - (0x2F96A, 'M', u'紀'), - (0x2F96B, 'M', u'𥾆'), - (0x2F96C, 'M', u'絣'), - (0x2F96D, 'M', u'䌁'), - (0x2F96E, 'M', 
u'緇'), - (0x2F96F, 'M', u'縂'), - (0x2F970, 'M', u'繅'), - (0x2F971, 'M', u'䌴'), - (0x2F972, 'M', u'𦈨'), - (0x2F973, 'M', u'𦉇'), - (0x2F974, 'M', u'䍙'), - (0x2F975, 'M', u'𦋙'), - (0x2F976, 'M', u'罺'), - (0x2F977, 'M', u'𦌾'), - (0x2F978, 'M', u'羕'), - (0x2F979, 'M', u'翺'), - (0x2F97A, 'M', u'者'), - (0x2F97B, 'M', u'𦓚'), - (0x2F97C, 'M', u'𦔣'), - (0x2F97D, 'M', u'聠'), - (0x2F97E, 'M', u'𦖨'), - (0x2F97F, 'M', u'聰'), - (0x2F980, 'M', u'𣍟'), - (0x2F981, 'M', u'䏕'), - (0x2F982, 'M', u'育'), - (0x2F983, 'M', u'脃'), - (0x2F984, 'M', u'䐋'), - (0x2F985, 'M', u'脾'), - (0x2F986, 'M', u'媵'), - (0x2F987, 'M', u'𦞧'), - (0x2F988, 'M', u'𦞵'), - (0x2F989, 'M', u'𣎓'), - (0x2F98A, 'M', u'𣎜'), - (0x2F98B, 'M', u'舁'), - (0x2F98C, 'M', u'舄'), - (0x2F98D, 'M', u'辞'), - (0x2F98E, 'M', u'䑫'), - (0x2F98F, 'M', u'芑'), - (0x2F990, 'M', u'芋'), - (0x2F991, 'M', u'芝'), - (0x2F992, 'M', u'劳'), - (0x2F993, 'M', u'花'), - (0x2F994, 'M', u'芳'), - (0x2F995, 'M', u'芽'), - (0x2F996, 'M', u'苦'), - (0x2F997, 'M', u'𦬼'), - (0x2F998, 'M', u'若'), - (0x2F999, 'M', u'茝'), - (0x2F99A, 'M', u'荣'), - (0x2F99B, 'M', u'莭'), - (0x2F99C, 'M', u'茣'), - (0x2F99D, 'M', u'莽'), - (0x2F99E, 'M', u'菧'), - (0x2F99F, 'M', u'著'), - (0x2F9A0, 'M', u'荓'), - (0x2F9A1, 'M', u'菊'), - (0x2F9A2, 'M', u'菌'), - (0x2F9A3, 'M', u'菜'), - (0x2F9A4, 'M', u'𦰶'), - (0x2F9A5, 'M', u'𦵫'), - (0x2F9A6, 'M', u'𦳕'), - (0x2F9A7, 'M', u'䔫'), - (0x2F9A8, 'M', u'蓱'), - (0x2F9A9, 'M', u'蓳'), - (0x2F9AA, 'M', u'蔖'), - (0x2F9AB, 'M', u'𧏊'), - (0x2F9AC, 'M', u'蕤'), - (0x2F9AD, 'M', u'𦼬'), - (0x2F9AE, 'M', u'䕝'), - (0x2F9AF, 'M', u'䕡'), - (0x2F9B0, 'M', u'𦾱'), - (0x2F9B1, 'M', u'𧃒'), - (0x2F9B2, 'M', u'䕫'), - (0x2F9B3, 'M', u'虐'), - (0x2F9B4, 'M', u'虜'), - (0x2F9B5, 'M', u'虧'), - (0x2F9B6, 'M', u'虩'), - (0x2F9B7, 'M', u'蚩'), - (0x2F9B8, 'M', u'蚈'), - (0x2F9B9, 'M', u'蜎'), - (0x2F9BA, 'M', u'蛢'), - ] - -def _seg_77(): - return [ - (0x2F9BB, 'M', u'蝹'), - (0x2F9BC, 'M', u'蜨'), - (0x2F9BD, 'M', u'蝫'), - (0x2F9BE, 'M', u'螆'), - (0x2F9BF, 'X'), - (0x2F9C0, 'M', u'蟡'), - (0x2F9C1, 'M', u'蠁'), - (0x2F9C2, 'M', u'䗹'), - (0x2F9C3, 'M', u'衠'), - (0x2F9C4, 'M', u'衣'), - (0x2F9C5, 'M', u'𧙧'), - (0x2F9C6, 'M', u'裗'), - (0x2F9C7, 'M', u'裞'), - (0x2F9C8, 'M', u'䘵'), - (0x2F9C9, 'M', u'裺'), - (0x2F9CA, 'M', u'㒻'), - (0x2F9CB, 'M', u'𧢮'), - (0x2F9CC, 'M', u'𧥦'), - (0x2F9CD, 'M', u'䚾'), - (0x2F9CE, 'M', u'䛇'), - (0x2F9CF, 'M', u'誠'), - (0x2F9D0, 'M', u'諭'), - (0x2F9D1, 'M', u'變'), - (0x2F9D2, 'M', u'豕'), - (0x2F9D3, 'M', u'𧲨'), - (0x2F9D4, 'M', u'貫'), - (0x2F9D5, 'M', u'賁'), - (0x2F9D6, 'M', u'贛'), - (0x2F9D7, 'M', u'起'), - (0x2F9D8, 'M', u'𧼯'), - (0x2F9D9, 'M', u'𠠄'), - (0x2F9DA, 'M', u'跋'), - (0x2F9DB, 'M', u'趼'), - (0x2F9DC, 'M', u'跰'), - (0x2F9DD, 'M', u'𠣞'), - (0x2F9DE, 'M', u'軔'), - (0x2F9DF, 'M', u'輸'), - (0x2F9E0, 'M', u'𨗒'), - (0x2F9E1, 'M', u'𨗭'), - (0x2F9E2, 'M', u'邔'), - (0x2F9E3, 'M', u'郱'), - (0x2F9E4, 'M', u'鄑'), - (0x2F9E5, 'M', u'𨜮'), - (0x2F9E6, 'M', u'鄛'), - (0x2F9E7, 'M', u'鈸'), - (0x2F9E8, 'M', u'鋗'), - (0x2F9E9, 'M', u'鋘'), - (0x2F9EA, 'M', u'鉼'), - (0x2F9EB, 'M', u'鏹'), - (0x2F9EC, 'M', u'鐕'), - (0x2F9ED, 'M', u'𨯺'), - (0x2F9EE, 'M', u'開'), - (0x2F9EF, 'M', u'䦕'), - (0x2F9F0, 'M', u'閷'), - (0x2F9F1, 'M', u'𨵷'), - (0x2F9F2, 'M', u'䧦'), - (0x2F9F3, 'M', u'雃'), - (0x2F9F4, 'M', u'嶲'), - (0x2F9F5, 'M', u'霣'), - (0x2F9F6, 'M', u'𩅅'), - (0x2F9F7, 'M', u'𩈚'), - (0x2F9F8, 'M', u'䩮'), - (0x2F9F9, 'M', u'䩶'), - (0x2F9FA, 'M', u'韠'), - (0x2F9FB, 'M', u'𩐊'), - (0x2F9FC, 'M', u'䪲'), - (0x2F9FD, 'M', u'𩒖'), - (0x2F9FE, 'M', u'頋'), - (0x2FA00, 'M', u'頩'), - (0x2FA01, 'M', u'𩖶'), - (0x2FA02, 'M', 
u'飢'), - (0x2FA03, 'M', u'䬳'), - (0x2FA04, 'M', u'餩'), - (0x2FA05, 'M', u'馧'), - (0x2FA06, 'M', u'駂'), - (0x2FA07, 'M', u'駾'), - (0x2FA08, 'M', u'䯎'), - (0x2FA09, 'M', u'𩬰'), - (0x2FA0A, 'M', u'鬒'), - (0x2FA0B, 'M', u'鱀'), - (0x2FA0C, 'M', u'鳽'), - (0x2FA0D, 'M', u'䳎'), - (0x2FA0E, 'M', u'䳭'), - (0x2FA0F, 'M', u'鵧'), - (0x2FA10, 'M', u'𪃎'), - (0x2FA11, 'M', u'䳸'), - (0x2FA12, 'M', u'𪄅'), - (0x2FA13, 'M', u'𪈎'), - (0x2FA14, 'M', u'𪊑'), - (0x2FA15, 'M', u'麻'), - (0x2FA16, 'M', u'䵖'), - (0x2FA17, 'M', u'黹'), - (0x2FA18, 'M', u'黾'), - (0x2FA19, 'M', u'鼅'), - (0x2FA1A, 'M', u'鼏'), - (0x2FA1B, 'M', u'鼖'), - (0x2FA1C, 'M', u'鼻'), - (0x2FA1D, 'M', u'𪘀'), - (0x2FA1E, 'X'), - (0xE0100, 'I'), - ] - -def _seg_78(): - return [ - (0xE01F0, 'X'), - ] - -uts46data = tuple( - _seg_0() - + _seg_1() - + _seg_2() - + _seg_3() - + _seg_4() - + _seg_5() - + _seg_6() - + _seg_7() - + _seg_8() - + _seg_9() - + _seg_10() - + _seg_11() - + _seg_12() - + _seg_13() - + _seg_14() - + _seg_15() - + _seg_16() - + _seg_17() - + _seg_18() - + _seg_19() - + _seg_20() - + _seg_21() - + _seg_22() - + _seg_23() - + _seg_24() - + _seg_25() - + _seg_26() - + _seg_27() - + _seg_28() - + _seg_29() - + _seg_30() - + _seg_31() - + _seg_32() - + _seg_33() - + _seg_34() - + _seg_35() - + _seg_36() - + _seg_37() - + _seg_38() - + _seg_39() - + _seg_40() - + _seg_41() - + _seg_42() - + _seg_43() - + _seg_44() - + _seg_45() - + _seg_46() - + _seg_47() - + _seg_48() - + _seg_49() - + _seg_50() - + _seg_51() - + _seg_52() - + _seg_53() - + _seg_54() - + _seg_55() - + _seg_56() - + _seg_57() - + _seg_58() - + _seg_59() - + _seg_60() - + _seg_61() - + _seg_62() - + _seg_63() - + _seg_64() - + _seg_65() - + _seg_66() - + _seg_67() - + _seg_68() - + _seg_69() - + _seg_70() - + _seg_71() - + _seg_72() - + _seg_73() - + _seg_74() - + _seg_75() - + _seg_76() - + _seg_77() - + _seg_78() -) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata-0.23.dist-info/INSTALLER b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata-0.23.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata-0.23.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata-0.23.dist-info/LICENSE b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata-0.23.dist-info/LICENSE deleted file mode 100644 index be7e092..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata-0.23.dist-info/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright 2017-2019 Jason R. Coombs, Barry Warsaw - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
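The uts46data module removed above is the vendored idna package's generated UTS #46 mapping table. Each entry is a tuple of (start codepoint, status, optional replacement) — 'V' valid, 'M' mapped to the given string, 'X' disallowed, 'I' ignored, '3' disallowed under STD3 rules (with an optional mapping) — and the table appears to be sorted by start codepoint, with each entry covering codepoints up to the next entry's start. The following is a minimal sketch of how such a range table is typically consumed with a binary search; it is illustrative only, the SAMPLE_TABLE and remap_char names are hypothetical, and this is not the idna package's actual lookup routine.

# Illustrative sketch, assuming a table shaped like the uts46data tuples above:
# (start_codepoint, status, optional_replacement), sorted by start codepoint,
# where each entry applies up to (but not including) the next entry's start.
import bisect

# Hypothetical miniature table in the same shape as the deleted data:
# 'V' = valid as-is, 'M' = mapped to the given string, 'X' = disallowed.
SAMPLE_TABLE = (
    (0x0041, 'M', 'a'),   # only 'A' and 'B' enumerated in this toy example
    (0x0042, 'M', 'b'),
    (0x0061, 'V'),        # lowercase ASCII letters are valid unchanged
    (0xD800, 'X'),        # surrogates are disallowed
    (0xE000, 'V'),
)
_STARTS = [entry[0] for entry in SAMPLE_TABLE]

def remap_char(ch):
    """Map one character according to the range table, or raise on 'X'."""
    cp = ord(ch)
    # Find the last entry whose start codepoint is <= cp.
    idx = bisect.bisect_right(_STARTS, cp) - 1
    if idx < 0:
        raise ValueError('codepoint below table range: %r' % ch)
    entry = SAMPLE_TABLE[idx]
    status = entry[1]
    if status == 'V':
        return ch
    if status == 'M':
        return entry[2]
    raise ValueError('disallowed codepoint: %r' % ch)

if __name__ == '__main__':
    print(''.join(remap_char(c) for c in 'Aa'))  # -> 'aa'

Splitting the table into the _seg_N() helpers seen above is purely a code-size workaround for very large literal tuples; the lookup itself only ever sees the single concatenated tuple.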
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata-0.23.dist-info/METADATA b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata-0.23.dist-info/METADATA deleted file mode 100644 index c12da68..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata-0.23.dist-info/METADATA +++ /dev/null @@ -1,65 +0,0 @@ -Metadata-Version: 2.1 -Name: importlib-metadata -Version: 0.23 -Summary: Read metadata from Python packages -Home-page: http://importlib-metadata.readthedocs.io/ -Author: Barry Warsaw -Author-email: barry@python.org -License: Apache Software License -Platform: UNKNOWN -Classifier: Development Status :: 3 - Alpha -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Topic :: Software Development :: Libraries -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 2 -Requires-Python: >=2.7,!=3.0,!=3.1,!=3.2,!=3.3 -Requires-Dist: zipp (>=0.5) -Requires-Dist: contextlib2 ; python_version < "3" -Requires-Dist: configparser (>=3.5) ; python_version < "3" -Requires-Dist: pathlib2 ; python_version == "3.4.*" or python_version < "3" -Provides-Extra: docs -Requires-Dist: sphinx ; extra == 'docs' -Requires-Dist: rst.linker ; extra == 'docs' -Provides-Extra: testing -Requires-Dist: packaging ; extra == 'testing' -Requires-Dist: importlib-resources ; (python_version < "3.7") and extra == 'testing' - -========================= - ``importlib_metadata`` -========================= - -``importlib_metadata`` is a library to access the metadata for a Python -package. It is intended to be ported to Python 3.8. - - -Usage -===== - -See the `online documentation `_ -for usage details. - -`Finder authors -`_ can -also add support for custom package installers. See the above documentation -for details. - - -Caveats -======= - -This project primarily supports third-party packages installed by PyPA -tools (or other conforming packages). It does not support: - -- Packages in the stdlib. -- Packages installed without metadata. 
- -Project details -=============== - - * Project home: https://gitlab.com/python-devs/importlib_metadata - * Report bugs at: https://gitlab.com/python-devs/importlib_metadata/issues - * Code hosting: https://gitlab.com/python-devs/importlib_metadata.git - * Documentation: http://importlib_metadata.readthedocs.io/ - - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata-0.23.dist-info/RECORD b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata-0.23.dist-info/RECORD deleted file mode 100644 index 36c6b2f..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata-0.23.dist-info/RECORD +++ /dev/null @@ -1,33 +0,0 @@ -importlib_metadata-0.23.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -importlib_metadata-0.23.dist-info/LICENSE,sha256=wNe6dAchmJ1VvVB8D9oTc-gHHadCuaSBAev36sYEM6U,571 -importlib_metadata-0.23.dist-info/METADATA,sha256=zaYmkbg8YZmp-MptyNCkstM4kvpqXcI5iw9XwqYsIqc,2105 -importlib_metadata-0.23.dist-info/RECORD,, -importlib_metadata-0.23.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110 -importlib_metadata-0.23.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19 -importlib_metadata/__init__.py,sha256=5ZATqOc76QoUeDwtX2Rul7EtcG5tumvv9k5wpegBQYU,17197 -importlib_metadata/__pycache__/__init__.cpython-37.pyc,, -importlib_metadata/__pycache__/_compat.cpython-37.pyc,, -importlib_metadata/_compat.py,sha256=PWaYFxVGb902XVUn8pwcZBSn94t2NUml00bdlpfUudg,3004 -importlib_metadata/docs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_metadata/docs/__pycache__/__init__.cpython-37.pyc,, -importlib_metadata/docs/__pycache__/conf.cpython-37.pyc,, -importlib_metadata/docs/changelog.rst,sha256=_RLRPO-npivxO-IlbhsdF9krNrukDWr6X4-sVfc-nqA,6205 -importlib_metadata/docs/conf.py,sha256=DM_-W8bvIar_YqWeRQUcgWT1_phXe-H2IcYgM8JIkiY,5468 -importlib_metadata/docs/index.rst,sha256=4T97pI0Iu40cEbqJy53LZeEToStyC45zHmZO9Cwe5Vk,2044 -importlib_metadata/docs/using.rst,sha256=2S6KGhJ66t8kM3cik7K03X1AJUGX0TWr6byaHEsJjnc,9826 -importlib_metadata/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_metadata/tests/__pycache__/__init__.cpython-37.pyc,, -importlib_metadata/tests/__pycache__/fixtures.cpython-37.pyc,, -importlib_metadata/tests/__pycache__/test_api.cpython-37.pyc,, -importlib_metadata/tests/__pycache__/test_integration.cpython-37.pyc,, -importlib_metadata/tests/__pycache__/test_main.cpython-37.pyc,, -importlib_metadata/tests/__pycache__/test_zip.cpython-37.pyc,, -importlib_metadata/tests/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -importlib_metadata/tests/data/__pycache__/__init__.cpython-37.pyc,, -importlib_metadata/tests/data/example-21.12-py3-none-any.whl,sha256=I-kYufETid-tDYyR8f1OFJ3t5u_Io23k0cbQxJTUN4I,1455 -importlib_metadata/tests/data/example-21.12-py3.6.egg,sha256=-EeugFAijkdUO9xyQHTZkQwZoFXK0_QxICBj6R5AAJo,1497 -importlib_metadata/tests/fixtures.py,sha256=sshuoJ4ezljeouUddVg-76K1UOStKWBecovZOKOBguk,5004 -importlib_metadata/tests/test_api.py,sha256=YMAGTsRENrtvpw2CSLmRndJMBeT4q_M0GSe-QsnnMZ4,5544 -importlib_metadata/tests/test_integration.py,sha256=kzqav9qAePjz7UR-GNna65xLwXlRcxEDYDwmuOFwpKE,686 -importlib_metadata/tests/test_main.py,sha256=njWXHvOY0a9lNS4SbM7jkRJngk0EdmAm5cRR7KaFtKo,6213 -importlib_metadata/tests/test_zip.py,sha256=qG3IquiTFLSrUtpxEJblqiUtgEcOTfjU2yM35REk0fo,2372 diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata-0.23.dist-info/WHEEL 
b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata-0.23.dist-info/WHEEL deleted file mode 100644 index 8b701e9..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata-0.23.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.6) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata-0.23.dist-info/top_level.txt b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata-0.23.dist-info/top_level.txt deleted file mode 100644 index bbb0754..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata-0.23.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -importlib_metadata diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/__init__.py deleted file mode 100644 index f0e7eba..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/__init__.py +++ /dev/null @@ -1,547 +0,0 @@ -from __future__ import unicode_literals, absolute_import - -import io -import os -import re -import abc -import csv -import sys -import zipp -import operator -import functools -import itertools -import collections - -from ._compat import ( - install, - NullFinder, - ConfigParser, - suppress, - map, - FileNotFoundError, - IsADirectoryError, - NotADirectoryError, - PermissionError, - pathlib, - PYPY_OPEN_BUG, - ModuleNotFoundError, - MetaPathFinder, - email_message_from_string, - ensure_is_path, - ) -from importlib import import_module -from itertools import starmap - - -__metaclass__ = type - - -__all__ = [ - 'Distribution', - 'DistributionFinder', - 'PackageNotFoundError', - 'distribution', - 'distributions', - 'entry_points', - 'files', - 'metadata', - 'requires', - 'version', - ] - - -class PackageNotFoundError(ModuleNotFoundError): - """The package was not found.""" - - -class EntryPoint(collections.namedtuple('EntryPointBase', 'name value group')): - """An entry point as defined by Python packaging conventions. - - See `the packaging docs on entry points - `_ - for more information. - """ - - pattern = re.compile( - r'(?P[\w.]+)\s*' - r'(:\s*(?P[\w.]+))?\s*' - r'(?P\[.*\])?\s*$' - ) - """ - A regular expression describing the syntax for an entry point, - which might look like: - - - module - - package.module - - package.module:attribute - - package.module:object.attribute - - package.module:attr [extra1, extra2] - - Other combinations are possible as well. - - The expression is lenient about whitespace around the ':', - following the attr, and following any extras. - """ - - def load(self): - """Load the entry point from its definition. If only a module - is indicated by the value, return that module. Otherwise, - return the named object. 
- """ - match = self.pattern.match(self.value) - module = import_module(match.group('module')) - attrs = filter(None, (match.group('attr') or '').split('.')) - return functools.reduce(getattr, attrs, module) - - @property - def extras(self): - match = self.pattern.match(self.value) - return list(re.finditer(r'\w+', match.group('extras') or '')) - - @classmethod - def _from_config(cls, config): - return [ - cls(name, value, group) - for group in config.sections() - for name, value in config.items(group) - ] - - @classmethod - def _from_text(cls, text): - config = ConfigParser(delimiters='=') - # case sensitive: https://stackoverflow.com/q/1611799/812183 - config.optionxform = str - try: - config.read_string(text) - except AttributeError: # pragma: nocover - # Python 2 has no read_string - config.readfp(io.StringIO(text)) - return EntryPoint._from_config(config) - - def __iter__(self): - """ - Supply iter so one may construct dicts of EntryPoints easily. - """ - return iter((self.name, self)) - - -class PackagePath(pathlib.PurePosixPath): - """A reference to a path in a package""" - - def read_text(self, encoding='utf-8'): - with self.locate().open(encoding=encoding) as stream: - return stream.read() - - def read_binary(self): - with self.locate().open('rb') as stream: - return stream.read() - - def locate(self): - """Return a path-like object for this path""" - return self.dist.locate_file(self) - - -class FileHash: - def __init__(self, spec): - self.mode, _, self.value = spec.partition('=') - - def __repr__(self): - return ''.format(self.mode, self.value) - - -class Distribution: - """A Python distribution package.""" - - @abc.abstractmethod - def read_text(self, filename): - """Attempt to load metadata file given by the name. - - :param filename: The name of the file in the distribution info. - :return: The text if found, otherwise None. - """ - - @abc.abstractmethod - def locate_file(self, path): - """ - Given a path to a file in this distribution, return a path - to it. - """ - - @classmethod - def from_name(cls, name): - """Return the Distribution for the given package name. - - :param name: The name of the distribution package to search for. - :return: The Distribution instance (or subclass thereof) for the named - package, if found. - :raises PackageNotFoundError: When the named package's distribution - metadata cannot be found. - """ - for resolver in cls._discover_resolvers(): - context = DistributionFinder.Context(name=name) - dists = cls._maybe_bind(resolver, context) - dist = next(dists, None) - if dist is not None: - return dist - else: - raise PackageNotFoundError(name) - - @classmethod - def discover(cls, **kwargs): - """Return an iterable of Distribution objects for all packages. - - Pass a ``context`` or pass keyword arguments for constructing - a context. - - :context: A ``DistributionFinder.Context`` object. - :return: Iterable of Distribution objects for all packages. - """ - context = kwargs.pop('context', None) - if context and kwargs: - raise ValueError("cannot accept context and kwargs") - context = context or DistributionFinder.Context(**kwargs) - return itertools.chain.from_iterable( - cls._maybe_bind(resolver, context) - for resolver in cls._discover_resolvers() - ) - - @staticmethod - def _maybe_bind(resolver, context): - """ - Only bind the context to the resolver if as a callable, - the resolver accepts the context parameter. 
- - Workaround for - https://gitlab.com/python-devs/importlib_metadata/issues/86 - """ - try: # pragma: nocover - return resolver(context) - except TypeError: # pragma: nocover - return resolver(name=context.name, path=context.path) - - @staticmethod - def at(path): - """Return a Distribution for the indicated metadata path - - :param path: a string or path-like object - :return: a concrete Distribution instance for the path - """ - return PathDistribution(ensure_is_path(path)) - - @staticmethod - def _discover_resolvers(): - """Search the meta_path for resolvers.""" - declared = ( - getattr(finder, 'find_distributions', None) - for finder in sys.meta_path - ) - return filter(None, declared) - - @property - def metadata(self): - """Return the parsed metadata for this Distribution. - - The returned object will have keys that name the various bits of - metadata. See PEP 566 for details. - """ - text = ( - self.read_text('METADATA') - or self.read_text('PKG-INFO') - # This last clause is here to support old egg-info files. Its - # effect is to just end up using the PathDistribution's self._path - # (which points to the egg-info file) attribute unchanged. - or self.read_text('') - ) - return email_message_from_string(text) - - @property - def version(self): - """Return the 'Version' metadata for the distribution package.""" - return self.metadata['Version'] - - @property - def entry_points(self): - return EntryPoint._from_text(self.read_text('entry_points.txt')) - - @property - def files(self): - """Files in this distribution. - - :return: List of PackagePath for this distribution or None - - Result is `None` if the metadata file that enumerates files - (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is - missing. - Result may be empty if the metadata exists but is empty. - """ - file_lines = self._read_files_distinfo() or self._read_files_egginfo() - - def make_file(name, hash=None, size_str=None): - result = PackagePath(name) - result.hash = FileHash(hash) if hash else None - result.size = int(size_str) if size_str else None - result.dist = self - return result - - return file_lines and list(starmap(make_file, csv.reader(file_lines))) - - def _read_files_distinfo(self): - """ - Read the lines of RECORD - """ - text = self.read_text('RECORD') - return text and text.splitlines() - - def _read_files_egginfo(self): - """ - SOURCES.txt might contain literal commas, so wrap each line - in quotes. 
- """ - text = self.read_text('SOURCES.txt') - return text and map('"{}"'.format, text.splitlines()) - - @property - def requires(self): - """Generated requirements specified for this Distribution""" - reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs() - return reqs and list(reqs) - - def _read_dist_info_reqs(self): - return self.metadata.get_all('Requires-Dist') - - def _read_egg_info_reqs(self): - source = self.read_text('requires.txt') - return source and self._deps_from_requires_text(source) - - @classmethod - def _deps_from_requires_text(cls, source): - section_pairs = cls._read_sections(source.splitlines()) - sections = { - section: list(map(operator.itemgetter('line'), results)) - for section, results in - itertools.groupby(section_pairs, operator.itemgetter('section')) - } - return cls._convert_egg_info_reqs_to_simple_reqs(sections) - - @staticmethod - def _read_sections(lines): - section = None - for line in filter(None, lines): - section_match = re.match(r'\[(.*)\]$', line) - if section_match: - section = section_match.group(1) - continue - yield locals() - - @staticmethod - def _convert_egg_info_reqs_to_simple_reqs(sections): - """ - Historically, setuptools would solicit and store 'extra' - requirements, including those with environment markers, - in separate sections. More modern tools expect each - dependency to be defined separately, with any relevant - extras and environment markers attached directly to that - requirement. This method converts the former to the - latter. See _test_deps_from_requires_text for an example. - """ - def make_condition(name): - return name and 'extra == "{name}"'.format(name=name) - - def parse_condition(section): - section = section or '' - extra, sep, markers = section.partition(':') - if extra and markers: - markers = '({markers})'.format(markers=markers) - conditions = list(filter(None, [markers, make_condition(extra)])) - return '; ' + ' and '.join(conditions) if conditions else '' - - for section, deps in sections.items(): - for dep in deps: - yield dep + parse_condition(section) - - -class DistributionFinder(MetaPathFinder): - """ - A MetaPathFinder capable of discovering installed distributions. - """ - - class Context: - - name = None - """ - Specific name for which a distribution finder should match. - """ - - def __init__(self, **kwargs): - vars(self).update(kwargs) - - @property - def path(self): - """ - The path that a distribution finder should search. - """ - return vars(self).get('path', sys.path) - - @property - def pattern(self): - return '.*' if self.name is None else re.escape(self.name) - - @abc.abstractmethod - def find_distributions(self, context=Context()): - """ - Find distributions. - - Return an iterable of all Distribution instances capable of - loading the metadata for packages matching the ``context``, - a DistributionFinder.Context instance. - """ - - -@install -class MetadataPathFinder(NullFinder, DistributionFinder): - """A degenerate finder for distribution packages on the file system. - - This finder supplies only a find_distributions() method for versions - of Python that do not have a PathFinder find_distributions(). - """ - - def find_distributions(self, context=DistributionFinder.Context()): - """ - Find distributions. - - Return an iterable of all Distribution instances capable of - loading the metadata for packages matching ``context.name`` - (or all names if ``None`` indicated) along the paths in the list - of directories ``context.path``. 
- """ - found = self._search_paths(context.pattern, context.path) - return map(PathDistribution, found) - - @classmethod - def _search_paths(cls, pattern, paths): - """Find metadata directories in paths heuristically.""" - return itertools.chain.from_iterable( - cls._search_path(path, pattern) - for path in map(cls._switch_path, paths) - ) - - @staticmethod - def _switch_path(path): - if not PYPY_OPEN_BUG or os.path.isfile(path): # pragma: no branch - with suppress(Exception): - return zipp.Path(path) - return pathlib.Path(path) - - @classmethod - def _matches_info(cls, normalized, item): - template = r'{pattern}(-.*)?\.(dist|egg)-info' - manifest = template.format(pattern=normalized) - return re.match(manifest, item.name, flags=re.IGNORECASE) - - @classmethod - def _matches_legacy(cls, normalized, item): - template = r'{pattern}-.*\.egg[\\/]EGG-INFO' - manifest = template.format(pattern=normalized) - return re.search(manifest, str(item), flags=re.IGNORECASE) - - @classmethod - def _search_path(cls, root, pattern): - if not root.is_dir(): - return () - normalized = pattern.replace('-', '_') - return (item for item in root.iterdir() - if cls._matches_info(normalized, item) - or cls._matches_legacy(normalized, item)) - - -class PathDistribution(Distribution): - def __init__(self, path): - """Construct a distribution from a path to the metadata directory. - - :param path: A pathlib.Path or similar object supporting - .joinpath(), __div__, .parent, and .read_text(). - """ - self._path = path - - def read_text(self, filename): - with suppress(FileNotFoundError, IsADirectoryError, KeyError, - NotADirectoryError, PermissionError): - return self._path.joinpath(filename).read_text(encoding='utf-8') - read_text.__doc__ = Distribution.read_text.__doc__ - - def locate_file(self, path): - return self._path.parent / path - - -def distribution(distribution_name): - """Get the ``Distribution`` instance for the named package. - - :param distribution_name: The name of the distribution package as a string. - :return: A ``Distribution`` instance (or subclass thereof). - """ - return Distribution.from_name(distribution_name) - - -def distributions(**kwargs): - """Get all ``Distribution`` instances in the current environment. - - :return: An iterable of ``Distribution`` instances. - """ - return Distribution.discover(**kwargs) - - -def metadata(distribution_name): - """Get the metadata for the named package. - - :param distribution_name: The name of the distribution package to query. - :return: An email.Message containing the parsed metadata. - """ - return Distribution.from_name(distribution_name).metadata - - -def version(distribution_name): - """Get the version string for the named package. - - :param distribution_name: The name of the distribution package to query. - :return: The version string for the package as defined in the package's - "Version" metadata key. - """ - return distribution(distribution_name).version - - -def entry_points(): - """Return EntryPoint objects for all installed packages. - - :return: EntryPoint objects for all installed packages. - """ - eps = itertools.chain.from_iterable( - dist.entry_points for dist in distributions()) - by_group = operator.attrgetter('group') - ordered = sorted(eps, key=by_group) - grouped = itertools.groupby(ordered, by_group) - return { - group: tuple(eps) - for group, eps in grouped - } - - -def files(distribution_name): - """Return a list of files for the named package. - - :param distribution_name: The name of the distribution package to query. 
- :return: List of files composing the distribution. - """ - return distribution(distribution_name).files - - -def requires(distribution_name): - """ - Return a list of requirements for the named package. - - :return: An iterator of requirements, suitable for - packaging.requirement.Requirement. - """ - return distribution(distribution_name).requires - - -__version__ = version(__name__) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/_compat.py b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/_compat.py deleted file mode 100644 index 4f54864..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/_compat.py +++ /dev/null @@ -1,100 +0,0 @@ -from __future__ import absolute_import - -import io -import abc -import sys -import email - - -if sys.version_info > (3,): # pragma: nocover - import builtins - from configparser import ConfigParser - from contextlib import suppress - FileNotFoundError = builtins.FileNotFoundError - IsADirectoryError = builtins.IsADirectoryError - NotADirectoryError = builtins.NotADirectoryError - PermissionError = builtins.PermissionError - map = builtins.map -else: # pragma: nocover - from backports.configparser import ConfigParser - from itertools import imap as map # type: ignore - from contextlib2 import suppress # noqa - FileNotFoundError = IOError, OSError - IsADirectoryError = IOError, OSError - NotADirectoryError = IOError, OSError - PermissionError = IOError, OSError - -if sys.version_info > (3, 5): # pragma: nocover - import pathlib -else: # pragma: nocover - import pathlib2 as pathlib - -try: - ModuleNotFoundError = builtins.FileNotFoundError -except (NameError, AttributeError): # pragma: nocover - ModuleNotFoundError = ImportError # type: ignore - - -if sys.version_info >= (3,): # pragma: nocover - from importlib.abc import MetaPathFinder -else: # pragma: nocover - class MetaPathFinder(object): - __metaclass__ = abc.ABCMeta - - -__metaclass__ = type -__all__ = [ - 'install', 'NullFinder', 'MetaPathFinder', 'ModuleNotFoundError', - 'pathlib', 'ConfigParser', 'map', 'suppress', 'FileNotFoundError', - 'NotADirectoryError', 'email_message_from_string', - ] - - -def install(cls): - """Class decorator for installation on sys.meta_path.""" - sys.meta_path.append(cls()) - return cls - - -class NullFinder: - """ - A "Finder" (aka "MetaClassFinder") that never finds any modules, - but may find distributions. - """ - @staticmethod - def find_spec(*args, **kwargs): - return None - - # In Python 2, the import system requires finders - # to have a find_module() method, but this usage - # is deprecated in Python 3 in favor of find_spec(). - # For the purposes of this finder (i.e. being present - # on sys.meta_path but having no other import - # system functionality), the two methods are identical. - find_module = find_spec - - -def py2_message_from_string(text): # nocoverpy3 - # Work around https://bugs.python.org/issue25545 where - # email.message_from_string cannot handle Unicode on Python 2. - io_buffer = io.StringIO(text) - return email.message_from_file(io_buffer) - - -email_message_from_string = ( - py2_message_from_string - if sys.version_info < (3,) else - email.message_from_string - ) - -# https://bitbucket.org/pypy/pypy/issues/3021/ioopen-directory-leaks-a-file-descriptor -PYPY_OPEN_BUG = getattr(sys, 'pypy_version_info', (9, 9, 9))[:3] <= (7, 1, 1) - - -def ensure_is_path(ob): - """Construct a Path from ob even if it's already one. - Specialized for Python 3.4. 
- """ - if (3,) < sys.version_info < (3, 5): - ob = str(ob) # pragma: nocover - return pathlib.Path(ob) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/docs/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/docs/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/docs/changelog.rst b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/docs/changelog.rst deleted file mode 100644 index 0437f41..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/docs/changelog.rst +++ /dev/null @@ -1,189 +0,0 @@ -========================= - importlib_metadata NEWS -========================= - -0.23 -==== -* Added a compatibility shim to prevent failures on beta releases - of Python before the signature changed to accept the - "context" parameter on find_distributions. This workaround - will have a limited lifespan, not to extend beyond release of - Python 3.8 final. - -0.22 -==== -* Renamed ``package`` parameter to ``distribution_name`` - as `recommended `_ - in the following functions: ``distribution``, ``metadata``, - ``version``, ``files``, and ``requires``. This - backward-incompatible change is expected to have little impact - as these functions are assumed to be primarily used with - positional parameters. - -0.21 -==== -* ``importlib.metadata`` now exposes the ``DistributionFinder`` - metaclass and references it in the docs for extending the - search algorithm. -* Add ``Distribution.at`` for constructing a Distribution object - from a known metadata directory on the file system. Closes #80. -* Distribution finders now receive a context object that - supplies ``.path`` and ``.name`` properties. This change - introduces a fundamental backward incompatibility for - any projects implementing a ``find_distributions`` method - on a ``MetaPathFinder``. This new layer of abstraction - allows this context to be supplied directly or constructed - on demand and opens the opportunity for a - ``find_distributions`` method to solicit additional - context from the caller. Closes #85. - -0.20 -==== -* Clarify in the docs that calls to ``.files`` could return - ``None`` when the metadata is not present. Closes #69. -* Return all requirements and not just the first for dist-info - packages. Closes #67. - -0.19 -==== -* Restrain over-eager egg metadata resolution. -* Add support for entry points with colons in the name. Closes #75. - -0.18 -==== -* Parse entry points case sensitively. Closes #68 -* Add a version constraint on the backport configparser package. Closes #66 - -0.17 -==== -* Fix a permission problem in the tests on Windows. - -0.16 -==== -* Don't crash if there exists an EGG-INFO directory on sys.path. - -0.15 -==== -* Fix documentation. - -0.14 -==== -* Removed ``local_distribution`` function from the API. - **This backward-incompatible change removes this - behavior summarily**. Projects should remove their - reliance on this behavior. A replacement behavior is - under review in the `pep517 project - `_. Closes #42. - -0.13 -==== -* Update docstrings to match PEP 8. Closes #63. -* Merged modules into one module. Closes #62. - -0.12 -==== -* Add support for eggs. !65; Closes #19. - -0.11 -==== -* Support generic zip files (not just wheels). Closes #59 -* Support zip files with multiple distributions in them. Closes #60 -* Fully expose the public API in ``importlib_metadata.__all__``. 
- -0.10 -==== -* The ``Distribution`` ABC is now officially part of the public API. - Closes #37. -* Fixed support for older single file egg-info formats. Closes #43. -* Fixed a testing bug when ``$CWD`` has spaces in the path. Closes #50. -* Add Python 3.8 to the ``tox`` testing matrix. - -0.9 -=== -* Fixed issue where entry points without an attribute would raise an - Exception. Closes #40. -* Removed unused ``name`` parameter from ``entry_points()``. Closes #44. -* ``DistributionFinder`` classes must now be instantiated before - being placed on ``sys.meta_path``. - -0.8 -=== -* This library can now discover/enumerate all installed packages. **This - backward-incompatible change alters the protocol finders must - implement to support distribution package discovery.** Closes #24. -* The signature of ``find_distributions()`` on custom installer finders - should now accept two parameters, ``name`` and ``path`` and - these parameters must supply defaults. -* The ``entry_points()`` method no longer accepts a package name - but instead returns all entry points in a dictionary keyed by the - ``EntryPoint.group``. The ``resolve`` method has been removed. Instead, - call ``EntryPoint.load()``, which has the same semantics as - ``pkg_resources`` and ``entrypoints``. **This is a backward incompatible - change.** -* Metadata is now always returned as Unicode text regardless of - Python version. Closes #29. -* This library can now discover metadata for a 'local' package (found - in the current-working directory). Closes #27. -* Added ``files()`` function for resolving files from a distribution. -* Added a new ``requires()`` function, which returns the requirements - for a package suitable for parsing by - ``packaging.requirements.Requirement``. Closes #18. -* The top-level ``read_text()`` function has been removed. Use - ``PackagePath.read_text()`` on instances returned by the ``files()`` - function. **This is a backward incompatible change.** -* Release dates are now automatically injected into the changelog - based on SCM tags. - -0.7 -=== -* Fixed issue where packages with dashes in their names would - not be discovered. Closes #21. -* Distribution lookup is now case-insensitive. Closes #20. -* Wheel distributions can no longer be discovered by their module - name. Like Path distributions, they must be indicated by their - distribution package name. - -0.6 -=== -* Removed ``importlib_metadata.distribution`` function. Now - the public interface is primarily the utility functions exposed - in ``importlib_metadata.__all__``. Closes #14. -* Added two new utility functions ``read_text`` and - ``metadata``. - -0.5 -=== -* Updated README and removed details about Distribution - class, now considered private. Closes #15. -* Added test suite support for Python 3.4+. -* Fixed SyntaxErrors on Python 3.4 and 3.5. !12 -* Fixed errors on Windows joining Path elements. !15 - -0.4 -=== -* Housekeeping. - -0.3 -=== -* Added usage documentation. Closes #8 -* Add support for getting metadata from wheels on ``sys.path``. Closes #9 - -0.2 -=== -* Added ``importlib_metadata.entry_points()``. Closes #1 -* Added ``importlib_metadata.resolve()``. Closes #12 -* Add support for Python 2.7. Closes #4 - -0.1 -=== -* Initial release. - - -.. 
- Local Variables: - mode: change-log-mode - indent-tabs-mode: nil - sentence-end-double-space: t - fill-column: 78 - coding: utf-8 - End: diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/docs/conf.py b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/docs/conf.py deleted file mode 100644 index af9f0e2..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/docs/conf.py +++ /dev/null @@ -1,182 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# -# importlib_metadata documentation build configuration file, created by -# sphinx-quickstart on Thu Nov 30 10:21:00 2017. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -# import os -# import sys -# sys.path.insert(0, os.path.abspath('.')) - - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -# -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'rst.linker', - 'sphinx.ext.autodoc', - 'sphinx.ext.coverage', - 'sphinx.ext.doctest', - 'sphinx.ext.intersphinx', - 'sphinx.ext.viewcode', - ] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# -# source_suffix = ['.rst', '.md'] -source_suffix = '.rst' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = 'importlib_metadata' -copyright = '2017-2019, Jason R. Coombs, Barry Warsaw' -author = 'Jason R. Coombs, Barry Warsaw' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = '0.1' -# The full version, including alpha/beta/rc tags. -release = '0.1' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This patterns also effect to html_static_path and html_extra_path -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = False - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. 
-# -html_theme = 'default' - -# Custom sidebar templates, must be a dictionary that maps document names -# to template names. -# -# This is required for the alabaster theme -# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars -html_sidebars = { - '**': [ - 'relations.html', # needs 'show_related': True theme option to display - 'searchbox.html', - ] - } - - -# -- Options for HTMLHelp output ------------------------------------------ - -# Output file base name for HTML help builder. -htmlhelp_basename = 'importlib_metadatadoc' - - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # - # 'papersize': 'letterpaper', - - # The font size ('10pt', '11pt' or '12pt'). - # - # 'pointsize': '10pt', - - # Additional stuff for the LaTeX preamble. - # - # 'preamble': '', - - # Latex figure (float) alignment - # - # 'figure_align': 'htbp', - } - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, 'importlib_metadata.tex', - 'importlib\\_metadata Documentation', - 'Brett Cannon, Barry Warsaw', 'manual'), - ] - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'importlib_metadata', 'importlib_metadata Documentation', - [author], 1) - ] - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - (master_doc, 'importlib_metadata', 'importlib_metadata Documentation', - author, 'importlib_metadata', 'One line description of project.', - 'Miscellaneous'), - ] - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - 'python': ('https://docs.python.org/3', None), - } - - -# For rst.linker, inject release dates into changelog.rst -link_files = { - 'changelog.rst': dict( - replace=[ - dict( - pattern=r'^(?m)((?Pv?\d+(\.\d+){1,2}))\n[-=]+\n', - with_scm='{text}\n{rev[timestamp]:%Y-%m-%d}\n\n', - ), - ], - ), - } diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/docs/index.rst b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/docs/index.rst deleted file mode 100644 index fb5047f..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/docs/index.rst +++ /dev/null @@ -1,53 +0,0 @@ -=============================== - Welcome to importlib_metadata -=============================== - -``importlib_metadata`` is a library which provides an API for accessing an -installed package's `metadata`_, such as its entry points or its top-level -name. This functionality intends to replace most uses of ``pkg_resources`` -`entry point API`_ and `metadata API`_. Along with ``importlib.resources`` in -`Python 3.7 and newer`_ (backported as `importlib_resources`_ for older -versions of Python), this can eliminate the need to use the older and less -efficient ``pkg_resources`` package. - -``importlib_metadata`` is a backport of Python 3.8's standard library -`importlib.metadata`_ module for Python 2.7, and 3.4 through 3.7. 
Users of -Python 3.8 and beyond are encouraged to use the standard library module, and -in fact for these versions, ``importlib_metadata`` just shadows that module. -Developers looking for detailed API descriptions should refer to the Python -3.8 standard library documentation. - -The documentation here includes a general :ref:`usage ` guide. - - -.. toctree:: - :maxdepth: 2 - :caption: Contents: - - using.rst - changelog (links).rst - - -Project details -=============== - - * Project home: https://gitlab.com/python-devs/importlib_metadata - * Report bugs at: https://gitlab.com/python-devs/importlib_metadata/issues - * Code hosting: https://gitlab.com/python-devs/importlib_metadata.git - * Documentation: http://importlib_metadata.readthedocs.io/ - - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` - - -.. _`metadata`: https://www.python.org/dev/peps/pep-0566/ -.. _`entry point API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points -.. _`metadata API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#metadata-api -.. _`Python 3.7 and newer`: https://docs.python.org/3/library/importlib.html#module-importlib.resources -.. _`importlib_resources`: https://importlib-resources.readthedocs.io/en/latest/index.html -.. _`importlib.metadata`: TBD diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/docs/using.rst b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/docs/using.rst deleted file mode 100644 index da40a0c..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/docs/using.rst +++ /dev/null @@ -1,259 +0,0 @@ -.. _using: - -========================== - Using importlib_metadata -========================== - -``importlib_metadata`` is a library that provides for access to installed -package metadata. Built in part on Python's import system, this library -intends to replace similar functionality in the `entry point -API`_ and `metadata API`_ of ``pkg_resources``. Along with -``importlib.resources`` in `Python 3.7 -and newer`_ (backported as `importlib_resources`_ for older versions of -Python), this can eliminate the need to use the older and less efficient -``pkg_resources`` package. - -By "installed package" we generally mean a third-party package installed into -Python's ``site-packages`` directory via tools such as `pip -`_. Specifically, -it means a package with either a discoverable ``dist-info`` or ``egg-info`` -directory, and metadata defined by `PEP 566`_ or its older specifications. -By default, package metadata can live on the file system or in zip archives on -``sys.path``. Through an extension mechanism, the metadata can live almost -anywhere. - - -Overview -======== - -Let's say you wanted to get the version string for a package you've installed -using ``pip``. We start by creating a virtual environment and installing -something into it:: - - $ python3 -m venv example - $ source example/bin/activate - (example) $ pip install importlib_metadata - (example) $ pip install wheel - -You can get the version string for ``wheel`` by running the following:: - - (example) $ python - >>> from importlib_metadata import version - >>> version('wheel') - '0.32.3' - -You can also get the set of entry points keyed by group, such as -``console_scripts``, ``distutils.commands`` and others. Each group contains a -sequence of :ref:`EntryPoint ` objects. 
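
As a quick illustration of the shape of that data (a minimal sketch: ``my.plugins`` is a made-up group name used only for this example, and the group is simply absent if nothing in your environment declares it), a plugin-style lookup might look like::

    >>> from importlib_metadata import entry_points
    >>> eps = entry_points()                 # dict: group name -> tuple of EntryPoint
    >>> plugins = eps.get('my.plugins', ())  # missing groups are simply not keys
    >>> loaded = {ep.name: ep.load() for ep in plugins}

The ``Entry points`` section below walks through the same objects in more detail.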
- -You can get the :ref:`metadata for a distribution `:: - - >>> list(metadata('wheel')) - ['Metadata-Version', 'Name', 'Version', 'Summary', 'Home-page', 'Author', 'Author-email', 'Maintainer', 'Maintainer-email', 'License', 'Project-URL', 'Project-URL', 'Project-URL', 'Keywords', 'Platform', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Classifier', 'Requires-Python', 'Provides-Extra', 'Requires-Dist', 'Requires-Dist'] - -You can also get a :ref:`distribution's version number `, list its -:ref:`constituent files `, and get a list of the distribution's -:ref:`requirements`. - - -Functional API -============== - -This package provides the following functionality via its public API. - - -.. _entry-points: - -Entry points ------------- - -The ``entry_points()`` function returns a dictionary of all entry points, -keyed by group. Entry points are represented by ``EntryPoint`` instances; -each ``EntryPoint`` has a ``.name``, ``.group``, and ``.value`` attributes and -a ``.load()`` method to resolve the value:: - - >>> eps = entry_points() - >>> list(eps) - ['console_scripts', 'distutils.commands', 'distutils.setup_keywords', 'egg_info.writers', 'setuptools.installation'] - >>> scripts = eps['console_scripts'] - >>> wheel = [ep for ep in scripts if ep.name == 'wheel'][0] - >>> wheel - EntryPoint(name='wheel', value='wheel.cli:main', group='console_scripts') - >>> main = wheel.load() - >>> main - - -The ``group`` and ``name`` are arbitrary values defined by the package author -and usually a client will wish to resolve all entry points for a particular -group. Read `the setuptools docs -`_ -for more information on entrypoints, their definition, and usage. - - -.. _metadata: - -Distribution metadata ---------------------- - -Every distribution includes some metadata, which you can extract using the -``metadata()`` function:: - - >>> wheel_metadata = metadata('wheel') - -The keys of the returned data structure [#f1]_ name the metadata keywords, and -their values are returned unparsed from the distribution metadata:: - - >>> wheel_metadata['Requires-Python'] - '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*' - - -.. _version: - -Distribution versions ---------------------- - -The ``version()`` function is the quickest way to get a distribution's version -number, as a string:: - - >>> version('wheel') - '0.32.3' - - -.. _files: - -Distribution files ------------------- - -You can also get the full set of files contained within a distribution. The -``files()`` function takes a distribution package name and returns all of the -files installed by this distribution. Each file object returned is a -``PackagePath``, a `pathlib.Path`_ derived object with additional ``dist``, -``size``, and ``hash`` properties as indicated by the metadata. For example:: - - >>> util = [p for p in files('wheel') if 'util.py' in str(p)][0] - >>> util - PackagePath('wheel/util.py') - >>> util.size - 859 - >>> util.dist - - >>> util.hash - - -Once you have the file, you can also read its contents:: - - >>> print(util.read_text()) - import base64 - import sys - ... - def as_bytes(s): - if isinstance(s, text_type): - return s.encode('utf-8') - return s - -In the case where the metadata file listing files -(RECORD or SOURCES.txt) is missing, ``files()`` will -return ``None``. 
The caller may wish to wrap calls to -``files()`` in `always_iterable -`_ -or otherwise guard against this condition if the target -distribution is not known to have the metadata present. - -.. _requirements: - -Distribution requirements -------------------------- - -To get the full set of requirements for a distribution, use the ``requires()`` -function:: - - >>> requires('wheel') - ["pytest (>=3.0.0) ; extra == 'test'", "pytest-cov ; extra == 'test'"] - - -Distributions -============= - -While the above API is the most common and convenient usage, you can get all -of that information from the ``Distribution`` class. A ``Distribution`` is an -abstract object that represents the metadata for a Python package. You can -get the ``Distribution`` instance:: - - >>> from importlib_metadata import distribution - >>> dist = distribution('wheel') - -Thus, an alternative way to get the version number is through the -``Distribution`` instance:: - - >>> dist.version - '0.32.3' - -There are all kinds of additional metadata available on the ``Distribution`` -instance:: - - >>> d.metadata['Requires-Python'] - '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*' - >>> d.metadata['License'] - 'MIT' - -The full set of available metadata is not described here. See `PEP 566 -`_ for additional details. - - -Extending the search algorithm -============================== - -Because package metadata is not available through ``sys.path`` searches, or -package loaders directly, the metadata for a package is found through import -system `finders`_. To find a distribution package's metadata, -``importlib_metadata`` queries the list of `meta path finders`_ on -`sys.meta_path`_. - -By default ``importlib_metadata`` installs a finder for distribution packages -found on the file system. This finder doesn't actually find any *packages*, -but it can find the packages' metadata. - -The abstract class :py:class:`importlib.abc.MetaPathFinder` defines the -interface expected of finders by Python's import system. -``importlib_metadata`` extends this protocol by looking for an optional -``find_distributions`` callable on the finders from -``sys.meta_path`` and presents this extended interface as the -``DistributionFinder`` abstract base class, which defines this abstract -method:: - - @abc.abstractmethod - def find_distributions(context=DistributionFinder.Context()): - """Return an iterable of all Distribution instances capable of - loading the metadata for packages for the indicated ``context``. - """ - -The ``DistributionFinder.Context`` object provides ``.path`` and ``.name`` -properties indicating the path to search and names to match and may -supply other relevant context. - -What this means in practice is that to support finding distribution package -metadata in locations other than the file system, you should derive from -``Distribution`` and implement the ``load_metadata()`` method. Then from -your finder, return instances of this derived ``Distribution`` in the -``find_distributions()`` method. - - -.. _`entry point API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#entry-points -.. _`metadata API`: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#metadata-api -.. _`Python 3.7 and newer`: https://docs.python.org/3/library/importlib.html#module-importlib.resources -.. _`importlib_resources`: https://importlib-resources.readthedocs.io/en/latest/index.html -.. _`PEP 566`: https://www.python.org/dev/peps/pep-0566/ -.. _`finders`: https://docs.python.org/3/reference/import.html#finders-and-loaders -.. 
_`meta path finders`: https://docs.python.org/3/glossary.html#term-meta-path-finder -.. _`sys.meta_path`: https://docs.python.org/3/library/sys.html#sys.meta_path -.. _`pathlib.Path`: https://docs.python.org/3/library/pathlib.html#pathlib.Path - - -.. rubric:: Footnotes - -.. [#f1] Technically, the returned distribution metadata object is an - `email.message.Message - `_ - instance, but this is an implementation detail, and not part of the - stable API. You should only use dictionary-like methods and syntax - to access the metadata contents. diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/data/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/data/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/data/example-21.12-py3-none-any.whl b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/data/example-21.12-py3-none-any.whl deleted file mode 100644 index 641ab07..0000000 Binary files a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/data/example-21.12-py3-none-any.whl and /dev/null differ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/data/example-21.12-py3.6.egg b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/data/example-21.12-py3.6.egg deleted file mode 100644 index cdb298a..0000000 Binary files a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/data/example-21.12-py3.6.egg and /dev/null differ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/fixtures.py b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/fixtures.py deleted file mode 100644 index 0b4ce18..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/fixtures.py +++ /dev/null @@ -1,200 +0,0 @@ -from __future__ import unicode_literals - -import os -import sys -import shutil -import tempfile -import textwrap -import contextlib - -try: - from contextlib import ExitStack -except ImportError: - from contextlib2 import ExitStack - -try: - import pathlib -except ImportError: - import pathlib2 as pathlib - - -__metaclass__ = type - - -@contextlib.contextmanager -def tempdir(): - tmpdir = tempfile.mkdtemp() - try: - yield pathlib.Path(tmpdir) - finally: - shutil.rmtree(tmpdir) - - -@contextlib.contextmanager -def save_cwd(): - orig = os.getcwd() - try: - yield - finally: - os.chdir(orig) - - -@contextlib.contextmanager -def tempdir_as_cwd(): - with tempdir() as tmp: - with save_cwd(): - os.chdir(str(tmp)) - yield tmp - - -class SiteDir: - def setUp(self): - self.fixtures = ExitStack() - self.addCleanup(self.fixtures.close) - self.site_dir = self.fixtures.enter_context(tempdir()) - - -class OnSysPath: - @staticmethod - @contextlib.contextmanager - def add_sys_path(dir): - sys.path[:0] = [str(dir)] - try: - yield - finally: - sys.path.remove(str(dir)) - - def setUp(self): - super(OnSysPath, self).setUp() - self.fixtures.enter_context(self.add_sys_path(self.site_dir)) - - -class DistInfoPkg(OnSysPath, SiteDir): - files = { - "distinfo_pkg-1.0.0.dist-info": { - "METADATA": """ - Name: distinfo-pkg - Author: Steven Ma - Version: 1.0.0 - Requires-Dist: wheel >= 
1.0 - Requires-Dist: pytest; extra == 'test' - """, - "RECORD": "mod.py,sha256=abc,20\n", - "entry_points.txt": """ - [entries] - main = mod:main - ns:sub = mod:main - """ - }, - "mod.py": """ - def main(): - print("hello world") - """, - } - - def setUp(self): - super(DistInfoPkg, self).setUp() - build_files(DistInfoPkg.files, self.site_dir) - - -class DistInfoPkgOffPath(SiteDir): - def setUp(self): - super(DistInfoPkgOffPath, self).setUp() - build_files(DistInfoPkg.files, self.site_dir) - - -class EggInfoPkg(OnSysPath, SiteDir): - files = { - "egginfo_pkg.egg-info": { - "PKG-INFO": """ - Name: egginfo-pkg - Author: Steven Ma - License: Unknown - Version: 1.0.0 - Classifier: Intended Audience :: Developers - Classifier: Topic :: Software Development :: Libraries - """, - "SOURCES.txt": """ - mod.py - egginfo_pkg.egg-info/top_level.txt - """, - "entry_points.txt": """ - [entries] - main = mod:main - """, - "requires.txt": """ - wheel >= 1.0; python_version >= "2.7" - [test] - pytest - """, - "top_level.txt": "mod\n" - }, - "mod.py": """ - def main(): - print("hello world") - """, - } - - def setUp(self): - super(EggInfoPkg, self).setUp() - build_files(EggInfoPkg.files, prefix=self.site_dir) - - -class EggInfoFile(OnSysPath, SiteDir): - files = { - "egginfo_file.egg-info": """ - Metadata-Version: 1.0 - Name: egginfo_file - Version: 0.1 - Summary: An example package - Home-page: www.example.com - Author: Eric Haffa-Vee - Author-email: eric@example.coms - License: UNKNOWN - Description: UNKNOWN - Platform: UNKNOWN - """, - } - - def setUp(self): - super(EggInfoFile, self).setUp() - build_files(EggInfoFile.files, prefix=self.site_dir) - - -def build_files(file_defs, prefix=pathlib.Path()): - """Build a set of files/directories, as described by the - - file_defs dictionary. Each key/value pair in the dictionary is - interpreted as a filename/contents pair. If the contents value is a - dictionary, a directory is created, and the dictionary interpreted - as the files within it, recursively. - - For example: - - {"README.txt": "A README file", - "foo": { - "__init__.py": "", - "bar": { - "__init__.py": "", - }, - "baz.py": "# Some code", - } - } - """ - for name, contents in file_defs.items(): - full_name = prefix / name - if isinstance(contents, dict): - full_name.mkdir() - build_files(contents, prefix=full_name) - else: - if isinstance(contents, bytes): - with full_name.open('wb') as f: - f.write(contents) - else: - with full_name.open('w') as f: - f.write(DALS(contents)) - - -def DALS(str): - "Dedent and left-strip" - return textwrap.dedent(str).lstrip() diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/test_api.py b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/test_api.py deleted file mode 100644 index aa346dd..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/test_api.py +++ /dev/null @@ -1,176 +0,0 @@ -import re -import textwrap -import unittest - -from . import fixtures -from .. import ( - Distribution, PackageNotFoundError, __version__, distribution, - entry_points, files, metadata, requires, version, - ) - -try: - from collections.abc import Iterator -except ImportError: - from collections import Iterator # noqa: F401 - -try: - from builtins import str as text -except ImportError: - from __builtin__ import unicode as text - - -class APITests( - fixtures.EggInfoPkg, - fixtures.DistInfoPkg, - fixtures.EggInfoFile, - unittest.TestCase): - - version_pattern = r'\d+\.\d+(\.\d)?' 
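    # APITests mixes in the EggInfoPkg, DistInfoPkg, and EggInfoFile fixtures,
    # so each test below is exercised against both egg-info and dist-info
    # metadata laid out in a temporary site directory placed on sys.path.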
- - def test_retrieves_version_of_self(self): - pkg_version = version('egginfo-pkg') - assert isinstance(pkg_version, text) - assert re.match(self.version_pattern, pkg_version) - - def test_retrieves_version_of_distinfo_pkg(self): - pkg_version = version('distinfo-pkg') - assert isinstance(pkg_version, text) - assert re.match(self.version_pattern, pkg_version) - - def test_for_name_does_not_exist(self): - with self.assertRaises(PackageNotFoundError): - distribution('does-not-exist') - - def test_for_top_level(self): - self.assertEqual( - distribution('egginfo-pkg').read_text('top_level.txt').strip(), - 'mod') - - def test_read_text(self): - top_level = [ - path for path in files('egginfo-pkg') - if path.name == 'top_level.txt' - ][0] - self.assertEqual(top_level.read_text(), 'mod\n') - - def test_entry_points(self): - entries = dict(entry_points()['entries']) - ep = entries['main'] - self.assertEqual(ep.value, 'mod:main') - self.assertEqual(ep.extras, []) - - def test_metadata_for_this_package(self): - md = metadata('egginfo-pkg') - assert md['author'] == 'Steven Ma' - assert md['LICENSE'] == 'Unknown' - assert md['Name'] == 'egginfo-pkg' - classifiers = md.get_all('Classifier') - assert 'Topic :: Software Development :: Libraries' in classifiers - - def test_importlib_metadata_version(self): - assert re.match(self.version_pattern, __version__) - - @staticmethod - def _test_files(files): - root = files[0].root - for file in files: - assert file.root == root - assert not file.hash or file.hash.value - assert not file.hash or file.hash.mode == 'sha256' - assert not file.size or file.size >= 0 - assert file.locate().exists() - assert isinstance(file.read_binary(), bytes) - if file.name.endswith('.py'): - file.read_text() - - def test_file_hash_repr(self): - try: - assertRegex = self.assertRegex - except AttributeError: - # Python 2 - assertRegex = self.assertRegexpMatches - - util = [ - p for p in files('distinfo-pkg') - if p.name == 'mod.py' - ][0] - assertRegex( - repr(util.hash), - '') - - def test_files_dist_info(self): - self._test_files(files('distinfo-pkg')) - - def test_files_egg_info(self): - self._test_files(files('egginfo-pkg')) - - def test_version_egg_info_file(self): - self.assertEqual(version('egginfo-file'), '0.1') - - def test_requires_egg_info_file(self): - requirements = requires('egginfo-file') - self.assertIsNone(requirements) - - def test_requires_egg_info(self): - deps = requires('egginfo-pkg') - assert len(deps) == 2 - assert any( - dep == 'wheel >= 1.0; python_version >= "2.7"' - for dep in deps - ) - - def test_requires_dist_info(self): - deps = requires('distinfo-pkg') - assert len(deps) == 2 - assert all(deps) - assert 'wheel >= 1.0' in deps - assert "pytest; extra == 'test'" in deps - - def test_more_complex_deps_requires_text(self): - requires = textwrap.dedent(""" - dep1 - dep2 - - [:python_version < "3"] - dep3 - - [extra1] - dep4 - - [extra2:python_version < "3"] - dep5 - """) - deps = sorted(Distribution._deps_from_requires_text(requires)) - expected = [ - 'dep1', - 'dep2', - 'dep3; python_version < "3"', - 'dep4; extra == "extra1"', - 'dep5; (python_version < "3") and extra == "extra2"', - ] - # It's important that the environment marker expression be - # wrapped in parentheses to avoid the following 'and' binding more - # tightly than some other part of the environment expression. 
- - assert deps == expected - - -class OffSysPathTests(fixtures.DistInfoPkgOffPath, unittest.TestCase): - def test_find_distributions_specified_path(self): - dists = Distribution.discover(path=[str(self.site_dir)]) - assert any( - dist.metadata['Name'] == 'distinfo-pkg' - for dist in dists - ) - - def test_distribution_at_pathlib(self): - """Demonstrate how to load metadata direct from a directory. - """ - dist_info_path = self.site_dir / 'distinfo_pkg-1.0.0.dist-info' - dist = Distribution.at(dist_info_path) - assert dist.version == '1.0.0' - - def test_distribution_at_str(self): - dist_info_path = self.site_dir / 'distinfo_pkg-1.0.0.dist-info' - dist = Distribution.at(str(dist_info_path)) - assert dist.version == '1.0.0' diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/test_integration.py b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/test_integration.py deleted file mode 100644 index 11ed7dc..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/test_integration.py +++ /dev/null @@ -1,22 +0,0 @@ -import unittest -import packaging.requirements -import packaging.version - -from . import fixtures -from .. import version - - -class IntegrationTests(fixtures.DistInfoPkg, unittest.TestCase): - - def test_package_spec_installed(self): - """ - Illustrate the recommended procedure to determine if - a specified version of a package is installed. - """ - def is_installed(package_spec): - req = packaging.requirements.Requirement(package_spec) - return version(req.name) in req.specifier - - assert is_installed('distinfo-pkg==1.0') - assert is_installed('distinfo-pkg>=1.0,<2.0') - assert not is_installed('distinfo-pkg<1.0') diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/test_main.py b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/test_main.py deleted file mode 100644 index 38ec884..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/test_main.py +++ /dev/null @@ -1,191 +0,0 @@ -# coding: utf-8 -from __future__ import unicode_literals - -import re -import textwrap -import unittest -import importlib -import importlib_metadata - -from . import fixtures -from .. import ( - Distribution, EntryPoint, MetadataPathFinder, - PackageNotFoundError, distributions, - entry_points, metadata, version, - ) - -try: - from builtins import str as text -except ImportError: - from __builtin__ import unicode as text - - -class BasicTests(fixtures.DistInfoPkg, unittest.TestCase): - version_pattern = r'\d+\.\d+(\.\d)?' - - def test_retrieves_version_of_self(self): - dist = Distribution.from_name('distinfo-pkg') - assert isinstance(dist.version, text) - assert re.match(self.version_pattern, dist.version) - - def test_for_name_does_not_exist(self): - with self.assertRaises(PackageNotFoundError): - Distribution.from_name('does-not-exist') - - def test_new_style_classes(self): - self.assertIsInstance(Distribution, type) - self.assertIsInstance(MetadataPathFinder, type) - - -class ImportTests(fixtures.DistInfoPkg, unittest.TestCase): - def test_import_nonexistent_module(self): - # Ensure that the MetadataPathFinder does not crash an import of a - # non-existent module. 
- with self.assertRaises(ImportError): - importlib.import_module('does_not_exist') - - def test_resolve(self): - entries = dict(entry_points()['entries']) - ep = entries['main'] - self.assertEqual(ep.load().__name__, "main") - - def test_entrypoint_with_colon_in_name(self): - entries = dict(entry_points()['entries']) - ep = entries['ns:sub'] - self.assertEqual(ep.value, 'mod:main') - - def test_resolve_without_attr(self): - ep = EntryPoint( - name='ep', - value='importlib_metadata', - group='grp', - ) - assert ep.load() is importlib_metadata - - -class NameNormalizationTests( - fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): - @staticmethod - def pkg_with_dashes(site_dir): - """ - Create minimal metadata for a package with dashes - in the name (and thus underscores in the filename). - """ - metadata_dir = site_dir / 'my_pkg.dist-info' - metadata_dir.mkdir() - metadata = metadata_dir / 'METADATA' - with metadata.open('w') as strm: - strm.write('Version: 1.0\n') - return 'my-pkg' - - def test_dashes_in_dist_name_found_as_underscores(self): - """ - For a package with a dash in the name, the dist-info metadata - uses underscores in the name. Ensure the metadata loads. - """ - pkg_name = self.pkg_with_dashes(self.site_dir) - assert version(pkg_name) == '1.0' - - @staticmethod - def pkg_with_mixed_case(site_dir): - """ - Create minimal metadata for a package with mixed case - in the name. - """ - metadata_dir = site_dir / 'CherryPy.dist-info' - metadata_dir.mkdir() - metadata = metadata_dir / 'METADATA' - with metadata.open('w') as strm: - strm.write('Version: 1.0\n') - return 'CherryPy' - - def test_dist_name_found_as_any_case(self): - """ - Ensure the metadata loads when queried with any case. - """ - pkg_name = self.pkg_with_mixed_case(self.site_dir) - assert version(pkg_name) == '1.0' - assert version(pkg_name.lower()) == '1.0' - assert version(pkg_name.upper()) == '1.0' - - -class NonASCIITests(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): - @staticmethod - def pkg_with_non_ascii_description(site_dir): - """ - Create minimal metadata for a package with non-ASCII in - the description. - """ - metadata_dir = site_dir / 'portend.dist-info' - metadata_dir.mkdir() - metadata = metadata_dir / 'METADATA' - with metadata.open('w', encoding='utf-8') as fp: - fp.write('Description: pôrˈtend\n') - return 'portend' - - @staticmethod - def pkg_with_non_ascii_description_egg_info(site_dir): - """ - Create minimal metadata for an egg-info package with - non-ASCII in the description. 
- """ - metadata_dir = site_dir / 'portend.dist-info' - metadata_dir.mkdir() - metadata = metadata_dir / 'METADATA' - with metadata.open('w', encoding='utf-8') as fp: - fp.write(textwrap.dedent(""" - Name: portend - - pôrˈtend - """).lstrip()) - return 'portend' - - def test_metadata_loads(self): - pkg_name = self.pkg_with_non_ascii_description(self.site_dir) - meta = metadata(pkg_name) - assert meta['Description'] == 'pôrˈtend' - - def test_metadata_loads_egg_info(self): - pkg_name = self.pkg_with_non_ascii_description_egg_info(self.site_dir) - meta = metadata(pkg_name) - assert meta.get_payload() == 'pôrˈtend\n' - - -class DiscoveryTests(fixtures.EggInfoPkg, - fixtures.DistInfoPkg, - unittest.TestCase): - - def test_package_discovery(self): - dists = list(distributions()) - assert all( - isinstance(dist, Distribution) - for dist in dists - ) - assert any( - dist.metadata['Name'] == 'egginfo-pkg' - for dist in dists - ) - assert any( - dist.metadata['Name'] == 'distinfo-pkg' - for dist in dists - ) - - def test_invalid_usage(self): - with self.assertRaises(ValueError): - list(distributions(context='something', name='else')) - - -class DirectoryTest(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): - def test_egg_info(self): - # make an `EGG-INFO` directory that's unrelated - self.site_dir.joinpath('EGG-INFO').mkdir() - # used to crash with `IsADirectoryError` - with self.assertRaises(PackageNotFoundError): - version('unknown-package') - - def test_egg(self): - egg = self.site_dir.joinpath('foo-3.6.egg') - egg.mkdir() - with self.add_sys_path(egg): - with self.assertRaises(PackageNotFoundError): - version('foo') diff --git a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/test_zip.py b/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/test_zip.py deleted file mode 100644 index 8cbba63..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/importlib_metadata/tests/test_zip.py +++ /dev/null @@ -1,70 +0,0 @@ -import sys -import unittest - -from .. import distribution, entry_points, files, PackageNotFoundError, version - -try: - from importlib.resources import path -except ImportError: - from importlib_resources import path - -try: - from contextlib import ExitStack -except ImportError: - from contextlib2 import ExitStack - - -class TestZip(unittest.TestCase): - root = 'importlib_metadata.tests.data' - - def setUp(self): - # Find the path to the example-*.whl so we can add it to the front of - # sys.path, where we'll then try to find the metadata thereof. 
- self.resources = ExitStack() - self.addCleanup(self.resources.close) - wheel = self.resources.enter_context( - path(self.root, 'example-21.12-py3-none-any.whl')) - sys.path.insert(0, str(wheel)) - self.resources.callback(sys.path.pop, 0) - - def test_zip_version(self): - self.assertEqual(version('example'), '21.12') - - def test_zip_version_does_not_match(self): - with self.assertRaises(PackageNotFoundError): - version('definitely-not-installed') - - def test_zip_entry_points(self): - scripts = dict(entry_points()['console_scripts']) - entry_point = scripts['example'] - self.assertEqual(entry_point.value, 'example:main') - entry_point = scripts['Example'] - self.assertEqual(entry_point.value, 'example:main') - - def test_missing_metadata(self): - self.assertIsNone(distribution('example').read_text('does not exist')) - - def test_case_insensitive(self): - self.assertEqual(version('Example'), '21.12') - - def test_files(self): - for file in files('example'): - path = str(file.dist.locate_file(file)) - assert '.whl/' in path, path - - -class TestEgg(TestZip): - def setUp(self): - # Find the path to the example-*.egg so we can add it to the front of - # sys.path, where we'll then try to find the metadata thereof. - self.resources = ExitStack() - self.addCleanup(self.resources.close) - egg = self.resources.enter_context( - path(self.root, 'example-21.12-py3.6.egg')) - sys.path.insert(0, str(egg)) - self.resources.callback(sys.path.pop, 0) - - def test_files(self): - for file in files('example'): - path = str(file.dist.locate_file(file)) - assert '.egg/' in path, path diff --git a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools-7.2.0.dist-info/INSTALLER b/.tox/py37-normal/lib/python3.7/site-packages/more_itertools-7.2.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools-7.2.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools-7.2.0.dist-info/LICENSE b/.tox/py37-normal/lib/python3.7/site-packages/more_itertools-7.2.0.dist-info/LICENSE deleted file mode 100644 index 0a523be..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools-7.2.0.dist-info/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (c) 2012 Erik Rose - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools-7.2.0.dist-info/METADATA b/.tox/py37-normal/lib/python3.7/site-packages/more_itertools-7.2.0.dist-info/METADATA deleted file mode 100644 index d7b367b..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools-7.2.0.dist-info/METADATA +++ /dev/null @@ -1,538 +0,0 @@ -Metadata-Version: 2.1 -Name: more-itertools -Version: 7.2.0 -Summary: More routines for operating on iterables, beyond itertools -Home-page: https://github.com/erikrose/more-itertools -Author: Erik Rose -Author-email: erikrose@grinchcentral.com -License: MIT -Keywords: itertools,iterator,iteration,filter,peek,peekable,collate,chunk,chunked -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Natural Language :: English -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Topic :: Software Development :: Libraries -Requires-Python: >=3.4 - -============== -More Itertools -============== - -.. image:: https://coveralls.io/repos/github/erikrose/more-itertools/badge.svg?branch=master - :target: https://coveralls.io/github/erikrose/more-itertools?branch=master - -Python's ``itertools`` library is a gem - you can compose elegant solutions -for a variety of problems with the functions it provides. In ``more-itertools`` -we collect additional building blocks, recipes, and routines for working with -Python iterables. - ----- - -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Grouping | `chunked `_, | -| | `ichunked `_, | -| | `sliced `_, | -| | `distribute `_, | -| | `divide `_, | -| | `split_at `_, | -| | `split_before `_, | -| | `split_after `_, | -| | `split_into `_, | -| | `bucket `_, | -| | `unzip `_, | -| | `grouper `_, | -| | `partition `_ | -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Lookahead and lookback | `spy `_, | -| | `peekable `_, | -| | `seekable `_ | -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Windowing | `windowed `_, | -| | `substrings `_, | -| | `substrings_indexes `_, | -| | `stagger `_, | -| | `pairwise `_ | -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Augmenting | `count_cycle `_, | -| | `intersperse `_, | -| | `padded `_, | -| | `adjacent `_, | -| | `groupby_transform `_, | -| | `padnone `_, | -| | `ncycles `_ | -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Combining | `collapse `_, | -| | `sort_together `_, | -| | `interleave `_, | -| | 
`interleave_longest `_, | -| | `collate `_, | -| | `zip_offset `_, | -| | `dotproduct `_, | -| | `flatten `_, | -| | `roundrobin `_, | -| | `prepend `_ | -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Summarizing | `ilen `_, | -| | `first `_, | -| | `last `_, | -| | `one `_, | -| | `only `_, | -| | `unique_to_each `_, | -| | `locate `_, | -| | `rlocate `_, | -| | `consecutive_groups `_, | -| | `exactly_n `_, | -| | `run_length `_, | -| | `map_reduce `_, | -| | `all_equal `_, | -| | `first_true `_, | -| | `nth `_, | -| | `quantify `_ | -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Selecting | `islice_extended `_, | -| | `strip `_, | -| | `lstrip `_, | -| | `rstrip `_, | -| | `take `_, | -| | `tail `_, | -| | `unique_everseen `_, | -| | `unique_justseen `_ | -| | `filter_except `_ | -| | `map_except `_ | -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Combinatorics | `distinct_permutations `_, | -| | `distinct_combinations `_, | -| | `circular_shifts `_, | -| | `partitions `_, | -| | `set_partitions `_, | -| | `powerset `_, | -| | `random_product `_, | -| | `random_permutation `_, | -| | `random_combination `_, | -| | `random_combination_with_replacement `_, | -| | `nth_combination `_ | -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Wrapping | `always_iterable `_, | -| | `consumer `_, | -| | `with_iter `_, | -| | `iter_except `_ | -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| Others | `replace `_, | -| | `numeric_range `_, | -| | `always_reversible `_, | -| | `side_effect `_, | -| | `iterate `_, | -| | `difference `_, | -| | `make_decorator `_, | -| | `SequenceView `_, | -| | `time_limited `_, | -| | `consume `_, | -| | `tabulate `_, | -| | `repeatfunc `_ | -+------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ - - -Getting started -=============== - -To get started, install the library with `pip `_: - -.. code-block:: shell - - pip install more-itertools - -The recipes from the `itertools docs `_ -are included in the top-level package: - -.. code-block:: python - - >>> from more_itertools import flatten - >>> iterable = [(0, 1), (2, 3)] - >>> list(flatten(iterable)) - [0, 1, 2, 3] - -Several new recipes are available as well: - -.. 
code-block:: python - - >>> from more_itertools import chunked - >>> iterable = [0, 1, 2, 3, 4, 5, 6, 7, 8] - >>> list(chunked(iterable, 3)) - [[0, 1, 2], [3, 4, 5], [6, 7, 8]] - - >>> from more_itertools import spy - >>> iterable = (x * x for x in range(1, 6)) - >>> head, iterable = spy(iterable, n=3) - >>> list(head) - [1, 4, 9] - >>> list(iterable) - [1, 4, 9, 16, 25] - - - -For the full listing of functions, see the `API documentation `_. - -Development -=========== - -``more-itertools`` is maintained by `@erikrose `_ -and `@bbayles `_, with help from `many others `_. -If you have a problem or suggestion, please file a bug or pull request in this -repository. Thanks for contributing! - - -Version History -=============== - - - :noindex: - -7.2.0 ------ - -* New itertools - * distinct_combinations - * set_partitions (thanks to kbarrett) - * filter_except - * map_except - -7.1.0 ------ - -* New itertools - * ichunked (thanks davebelais and youtux) - * only (thanks jaraco) - -* Changes to existing itertools: - * numeric_range now supports ranges specified by - ``datetime.datetime`` and ``datetime.timedelta`` objects (thanks to MSeifert04 for tests). - * difference now supports an *initial* keyword argument. - - -* Other changes - * Various documentation fixes (thanks raimon49, pylang) - -7.0.0 ------ - -* New itertools: - * time_limited - * partitions (thanks to rominf and Saluev) - * substrings_indexes (thanks to rominf) - -* Changes to existing itertools: - * collapse now treats ``bytes`` objects the same as ``str`` objects. (thanks to Sweenpet) - -The major version update is due to the change in the default behavior of -collapse. It now treats ``bytes`` objects the same as ``str`` objects. -This aligns its behavior with always_iterable. - -.. code-block:: python - - >>> from more_itertools import collapse - >>> iterable = [[1, 2], b'345', [6]] - >>> print(list(collapse(iterable))) - [1, 2, b'345', 6] - -6.0.0 ------ - -* Major changes: - * Python 2.7 is no longer supported. The 5.0.0 release will be the last - version targeting Python 2.7. - * All future releases will target the active versions of Python 3. - As of 2019, those are Python 3.4 and above. - * The ``six`` library is no longer a dependency. - * The accumulate function is no longer part of this library. You - may import a better version from the standard ``itertools`` module. - -* Changes to existing itertools: - * The order of the parameters in grouper have changed to match - the latest recipe in the itertools documentation. Use of the old order - will be supported in this release, but emit a ``DeprecationWarning``. - The legacy behavior will be dropped in a future release. (thanks to jaraco) - * distinct_permutations was improved (thanks to jferard - see also `permutations with unique values `_ at StackOverflow.) - * An unused parameter was removed from substrings. (thanks to pylang) - -* Other changes: - * The docs for unique_everseen were improved. (thanks to jferard and MSeifert04) - * Several Python 2-isms were removed. (thanks to jaraco, MSeifert04, and hugovk) - -5.0.0 ------ - -* New itertools: - * split_into (thanks to rovyko) - * unzip (thanks to bmintz) - * substrings (thanks to pylang) - -* Changes to existing itertools: - * ilen was optimized a bit (thanks to MSeifert04, achampion, and bmintz) - * first_true now returns ``None`` by default. This is the reason for the major version bump - see below. 
(thanks to sk and OJFord) - -* Other changes: - * Some code for old Python versions was removed (thanks to hugovk) - * Some documentation mistakes were corrected (thanks to belm0 and hugovk) - * Tests now run properly on 32-bit versions of Python (thanks to Millak) - * Newer versions of CPython and PyPy are now tested against - -The major version update is due to the change in the default return value of -first_true. It's now ``None``. - -.. code-block:: python - - >>> from more_itertools import first_true - >>> iterable = [0, '', False, [], ()] # All these are False - >>> answer = first_true(iterable) - >>> print(answer) - None - -4.3.0 ------ - -* New itertools: - * last (thanks to tmshn) - * replace (thanks to pylang) - * rlocate (thanks to jferard and pylang) - -* Improvements to existing itertools: - * locate can now search for multiple items - -* Other changes: - * The docs now include a nice table of tools (thanks MSeifert04) - -4.2.0 ------ - -* New itertools: - * map_reduce (thanks to pylang) - * prepend (from the `Python 3.7 docs `_) - -* Improvements to existing itertools: - * bucket now complies with PEP 479 (thanks to irmen) - -* Other changes: - * Python 3.7 is now supported (thanks to irmen) - * Python 3.3 is no longer supported - * The test suite no longer requires third-party modules to run - * The API docs now include links to source code - -4.1.0 ------ - -* New itertools: - * split_at (thanks to michael-celani) - * circular_shifts (thanks to hiqua) - * make_decorator - see the blog post `Yo, I heard you like decorators `_ - for a tour (thanks to pylang) - * always_reversible (thanks to michael-celani) - * nth_combination (from the `Python 3.7 docs `_) - -* Improvements to existing itertools: - * seekable now has an ``elements`` method to return cached items. - * The performance tradeoffs between roundrobin and - interleave_longest are now documented (thanks michael-celani, - pylang, and MSeifert04) - -4.0.1 ------ - -* No code changes - this release fixes how the docs display on PyPI. - -4.0.0 ------ - -* New itertools: - * consecutive_groups (Based on the example in the `Python 2.4 docs `_) - * seekable (If you're looking for how to "reset" an iterator, - you're in luck!) - * exactly_n (thanks to michael-celani) - * run_length.encode and run_length.decode - * difference - -* Improvements to existing itertools: - * The number of items between filler elements in intersperse can - now be specified (thanks to pylang) - * distinct_permutations and peekable got some minor - adjustments (thanks to MSeifert04) - * always_iterable now returns an iterator object. It also now - allows different types to be considered iterable (thanks to jaraco) - * bucket can now limit the keys it stores in memory - * one now allows for custom exceptions (thanks to kalekundert) - -* Other changes: - * A few typos were fixed (thanks to EdwardBetts) - * All tests can now be run with ``python setup.py test`` - -The major version update is due to the change in the return value of always_iterable. -It now always returns iterator objects: - -.. 
code-block:: python - - >>> from more_itertools import always_iterable - # Non-iterable objects are wrapped with iter(tuple(obj)) - >>> always_iterable(12345) - - >>> list(always_iterable(12345)) - [12345] - # Iterable objects are wrapped with iter() - >>> always_iterable([1, 2, 3, 4, 5]) - - -3.2.0 ------ - -* New itertools: - * lstrip, rstrip, and strip - (thanks to MSeifert04 and pylang) - * islice_extended -* Improvements to existing itertools: - * Some bugs with slicing peekable-wrapped iterables were fixed - -3.1.0 ------ - -* New itertools: - * numeric_range (Thanks to BebeSparkelSparkel and MSeifert04) - * count_cycle (Thanks to BebeSparkelSparkel) - * locate (Thanks to pylang and MSeifert04) -* Improvements to existing itertools: - * A few itertools are now slightly faster due to some function - optimizations. (Thanks to MSeifert04) -* The docs have been substantially revised with installation notes, - categories for library functions, links, and more. (Thanks to pylang) - - -3.0.0 ------ - -* Removed itertools: - * ``context`` has been removed due to a design flaw - see below for - replacement options. (thanks to NeilGirdhar) -* Improvements to existing itertools: - * ``side_effect`` now supports ``before`` and ``after`` keyword - arguments. (Thanks to yardsale8) -* PyPy and PyPy3 are now supported. - -The major version change is due to the removal of the ``context`` function. -Replace it with standard ``with`` statement context management: - -.. code-block:: python - - # Don't use context() anymore - file_obj = StringIO() - consume(print(x, file=f) for f in context(file_obj) for x in u'123') - - # Use a with statement instead - file_obj = StringIO() - with file_obj as f: - consume(print(x, file=f) for x in u'123') - -2.6.0 ------ - -* New itertools: - * ``adjacent`` and ``groupby_transform`` (Thanks to diazona) - * ``always_iterable`` (Thanks to jaraco) - * (Removed in 3.0.0) ``context`` (Thanks to yardsale8) - * ``divide`` (Thanks to mozbhearsum) -* Improvements to existing itertools: - * ``ilen`` is now slightly faster. (Thanks to wbolster) - * ``peekable`` can now prepend items to an iterable. (Thanks to diazona) - -2.5.0 ------ - -* New itertools: - * ``distribute`` (Thanks to mozbhearsum and coady) - * ``sort_together`` (Thanks to clintval) - * ``stagger`` and ``zip_offset`` (Thanks to joshbode) - * ``padded`` -* Improvements to existing itertools: - * ``peekable`` now handles negative indexes and slices with negative - components properly. - * ``intersperse`` is now slightly faster. (Thanks to pylang) - * ``windowed`` now accepts a ``step`` keyword argument. - (Thanks to pylang) -* Python 3.6 is now supported. - -2.4.1 ------ - -* Move docs 100% to readthedocs.io. - -2.4 ------ - -* New itertools: - * ``accumulate``, ``all_equal``, ``first_true``, ``partition``, and - ``tail`` from the itertools documentation. - * ``bucket`` (Thanks to Rosuav and cvrebert) - * ``collapse`` (Thanks to abarnet) - * ``interleave`` and ``interleave_longest`` (Thanks to abarnet) - * ``side_effect`` (Thanks to nvie) - * ``sliced`` (Thanks to j4mie and coady) - * ``split_before`` and ``split_after`` (Thanks to astronouth7303) - * ``spy`` (Thanks to themiurgo and mathieulongtin) -* Improvements to existing itertools: - * ``chunked`` is now simpler and more friendly to garbage collection. - (Contributed by coady, with thanks to piskvorky) - * ``collate`` now delegates to ``heapq.merge`` when possible. 
- (Thanks to kmike and julianpistorius) - * ``peekable``-wrapped iterables are now indexable and sliceable. - Iterating through ``peekable``-wrapped iterables is also faster. - * ``one`` and ``unique_to_each`` have been simplified. - (Thanks to coady) - - -2.3 ------ - -* Added ``one`` from ``jaraco.util.itertools``. (Thanks, jaraco!) -* Added ``distinct_permutations`` and ``unique_to_each``. (Contributed by - bbayles) -* Added ``windowed``. (Contributed by bbayles, with thanks to buchanae, - jaraco, and abarnert) -* Simplified the implementation of ``chunked``. (Thanks, nvie!) -* Python 3.5 is now supported. Python 2.6 is no longer supported. -* Python 3 is now supported directly; there is no 2to3 step. - -2.2 ------ - -* Added ``iterate`` and ``with_iter``. (Thanks, abarnert!) - -2.1 ------ - -* Added (tested!) implementations of the recipes from the itertools - documentation. (Thanks, Chris Lonnen!) -* Added ``ilen``. (Thanks for the inspiration, Matt Basta!) - -2.0 ------ - -* ``chunked`` now returns lists rather than tuples. After all, they're - homogeneous. This slightly backward-incompatible change is the reason for - the major version bump. -* Added ``@consumer``. -* Improved test machinery. - -1.1 ------ - -* Added ``first`` function. -* Added Python 3 support. -* Added a default arg to ``peekable.peek()``. -* Noted how to easily test whether a peekable iterator is exhausted. -* Rewrote documentation. - -1.0 ------ - -* Initial release, with ``collate``, ``peekable``, and ``chunked``. Could - really use better docs. - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools-7.2.0.dist-info/RECORD b/.tox/py37-normal/lib/python3.7/site-packages/more_itertools-7.2.0.dist-info/RECORD deleted file mode 100644 index cd0bac5..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools-7.2.0.dist-info/RECORD +++ /dev/null @@ -1,18 +0,0 @@ -more_itertools-7.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -more_itertools-7.2.0.dist-info/LICENSE,sha256=CfHIyelBrz5YTVlkHqm4fYPAyw_QB-te85Gn4mQ8GkY,1053 -more_itertools-7.2.0.dist-info/METADATA,sha256=Z-MGFuCluxO39U0eOB0d1T-JTWA86FF7AXNgO2e141I,38702 -more_itertools-7.2.0.dist-info/RECORD,, -more_itertools-7.2.0.dist-info/WHEEL,sha256=S8S5VL-stOTSZDYxHyf0KP7eds0J72qrK0Evu3TfyAY,92 -more_itertools-7.2.0.dist-info/top_level.txt,sha256=fAuqRXu9LPhxdB9ujJowcFOu1rZ8wzSpOW9_jlKis6M,15 -more_itertools/__init__.py,sha256=S-n6S9N3UplqU3p-7AV-1Znl6yJwuXsJtqrlEq9tOUw,87 -more_itertools/__pycache__/__init__.cpython-37.pyc,, -more_itertools/__pycache__/more.cpython-37.pyc,, -more_itertools/__pycache__/recipes.cpython-37.pyc,, -more_itertools/more.py,sha256=4lZSk1NeNsESx1sjTjKL6G7CEOGIqnjFKEj0kLTcI-0,82859 -more_itertools/recipes.py,sha256=4X8EBZLOEyKKVlacSp2jAg56-GEo1jhCU2eH4WlbEC4,15235 -more_itertools/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -more_itertools/tests/__pycache__/__init__.cpython-37.pyc,, -more_itertools/tests/__pycache__/test_more.cpython-37.pyc,, -more_itertools/tests/__pycache__/test_recipes.cpython-37.pyc,, -more_itertools/tests/test_more.py,sha256=T2XyR_92J4lcOQud8IwlUjbc_ERXTHJnvcHwL_I2ce4,92362 -more_itertools/tests/test_recipes.py,sha256=UxBADwAu1U0FlNXtU6ybKkHhh1Lt2opwsTFNnSbJ7uU,19478 diff --git a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools-7.2.0.dist-info/WHEEL b/.tox/py37-normal/lib/python3.7/site-packages/more_itertools-7.2.0.dist-info/WHEEL deleted file mode 100644 index c57a597..0000000 --- 
a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools-7.2.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.4) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools-7.2.0.dist-info/top_level.txt b/.tox/py37-normal/lib/python3.7/site-packages/more_itertools-7.2.0.dist-info/top_level.txt deleted file mode 100644 index a5035be..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools-7.2.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -more_itertools diff --git a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/more_itertools/__init__.py deleted file mode 100644 index bba462c..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from more_itertools.more import * # noqa -from more_itertools.recipes import * # noqa diff --git a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools/more.py b/.tox/py37-normal/lib/python3.7/site-packages/more_itertools/more.py deleted file mode 100644 index 3d715ac..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools/more.py +++ /dev/null @@ -1,2630 +0,0 @@ -from collections import Counter, defaultdict, deque -from collections.abc import Sequence -from functools import partial, wraps -from heapq import merge -from itertools import ( - combinations, - chain, - compress, - count, - cycle, - dropwhile, - groupby, - islice, - repeat, - starmap, - takewhile, - tee, - zip_longest, -) -from operator import itemgetter, lt, gt, sub -from sys import maxsize, version_info -from time import monotonic - -from .recipes import consume, flatten, powerset, take, unique_everseen - -__all__ = [ - 'adjacent', - 'always_iterable', - 'always_reversible', - 'bucket', - 'chunked', - 'circular_shifts', - 'collapse', - 'collate', - 'consecutive_groups', - 'consumer', - 'count_cycle', - 'difference', - 'distinct_combinations', - 'distinct_permutations', - 'distribute', - 'divide', - 'exactly_n', - 'filter_except', - 'first', - 'groupby_transform', - 'ilen', - 'interleave_longest', - 'interleave', - 'intersperse', - 'islice_extended', - 'iterate', - 'ichunked', - 'last', - 'locate', - 'lstrip', - 'make_decorator', - 'map_except', - 'map_reduce', - 'numeric_range', - 'one', - 'only', - 'padded', - 'partitions', - 'set_partitions', - 'peekable', - 'replace', - 'rlocate', - 'rstrip', - 'run_length', - 'seekable', - 'SequenceView', - 'side_effect', - 'sliced', - 'sort_together', - 'split_at', - 'split_after', - 'split_before', - 'split_into', - 'spy', - 'stagger', - 'strip', - 'substrings', - 'substrings_indexes', - 'time_limited', - 'unique_to_each', - 'unzip', - 'windowed', - 'with_iter', - 'zip_offset', -] - -_marker = object() - - -def chunked(iterable, n): - """Break *iterable* into lists of length *n*: - - >>> list(chunked([1, 2, 3, 4, 5, 6], 3)) - [[1, 2, 3], [4, 5, 6]] - - If the length of *iterable* is not evenly divisible by *n*, the last - returned list will be shorter: - - >>> list(chunked([1, 2, 3, 4, 5, 6, 7, 8], 3)) - [[1, 2, 3], [4, 5, 6], [7, 8]] - - To use a fill-in value instead, see the :func:`grouper` recipe. - - :func:`chunked` is useful for splitting up a computation on a large number - of keys into batches, to be pickled and sent off to worker processes. 
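# --- editor's aside: illustrative sketch, not part of the deleted more.py ---
# A self-contained equivalent of the batching idea described above, built only
# on itertools.islice; chunked_sketch is an editor-invented name, not library API.
from itertools import islice

def chunked_sketch(iterable, n):
    it = iter(iterable)
    while True:
        batch = list(islice(it, n))
        if not batch:
            return
        yield batch

# list(chunked_sketch([1, 2, 3, 4, 5, 6, 7, 8], 3)) -> [[1, 2, 3], [4, 5, 6], [7, 8]]
# --- end of editor's aside; the deleted docstring continues below ---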
One - example is operations on rows in MySQL, which does not implement - server-side cursors properly and would otherwise load the entire dataset - into RAM on the client. - - """ - return iter(partial(take, n, iter(iterable)), []) - - -def first(iterable, default=_marker): - """Return the first item of *iterable*, or *default* if *iterable* is - empty. - - >>> first([0, 1, 2, 3]) - 0 - >>> first([], 'some default') - 'some default' - - If *default* is not provided and there are no items in the iterable, - raise ``ValueError``. - - :func:`first` is useful when you have a generator of expensive-to-retrieve - values and want any arbitrary one. It is marginally shorter than - ``next(iter(iterable), default)``. - - """ - try: - return next(iter(iterable)) - except StopIteration: - # I'm on the edge about raising ValueError instead of StopIteration. At - # the moment, ValueError wins, because the caller could conceivably - # want to do something different with flow control when I raise the - # exception, and it's weird to explicitly catch StopIteration. - if default is _marker: - raise ValueError('first() was called on an empty iterable, and no ' - 'default value was provided.') - return default - - -def last(iterable, default=_marker): - """Return the last item of *iterable*, or *default* if *iterable* is - empty. - - >>> last([0, 1, 2, 3]) - 3 - >>> last([], 'some default') - 'some default' - - If *default* is not provided and there are no items in the iterable, - raise ``ValueError``. - """ - try: - try: - # Try to access the last item directly - return iterable[-1] - except (TypeError, AttributeError, KeyError): - # If not slice-able, iterate entirely using length-1 deque - return deque(iterable, maxlen=1)[0] - except IndexError: # If the iterable was empty - if default is _marker: - raise ValueError('last() was called on an empty iterable, and no ' - 'default value was provided.') - return default - - -class peekable: - """Wrap an iterator to allow lookahead and prepending elements. - - Call :meth:`peek` on the result to get the value that will be returned - by :func:`next`. This won't advance the iterator: - - >>> p = peekable(['a', 'b']) - >>> p.peek() - 'a' - >>> next(p) - 'a' - - Pass :meth:`peek` a default value to return that instead of raising - ``StopIteration`` when the iterator is exhausted. - - >>> p = peekable([]) - >>> p.peek('hi') - 'hi' - - peekables also offer a :meth:`prepend` method, which "inserts" items - at the head of the iterable: - - >>> p = peekable([1, 2, 3]) - >>> p.prepend(10, 11, 12) - >>> next(p) - 10 - >>> p.peek() - 11 - >>> list(p) - [11, 12, 1, 2, 3] - - peekables can be indexed. Index 0 is the item that will be returned by - :func:`next`, index 1 is the item after that, and so on: - The values up to the given index will be cached. - - >>> p = peekable(['a', 'b', 'c', 'd']) - >>> p[0] - 'a' - >>> p[1] - 'b' - >>> next(p) - 'a' - - Negative indexes are supported, but be aware that they will cache the - remaining items in the source iterator, which may require significant - storage. - - To check whether a peekable is exhausted, check its truth value: - - >>> p = peekable(['a', 'b']) - >>> if p: # peekable has items - ... list(p) - ['a', 'b'] - >>> if not p: # peekable is exhaused - ... 
list(p) - [] - - """ - def __init__(self, iterable): - self._it = iter(iterable) - self._cache = deque() - - def __iter__(self): - return self - - def __bool__(self): - try: - self.peek() - except StopIteration: - return False - return True - - def peek(self, default=_marker): - """Return the item that will be next returned from ``next()``. - - Return ``default`` if there are no items left. If ``default`` is not - provided, raise ``StopIteration``. - - """ - if not self._cache: - try: - self._cache.append(next(self._it)) - except StopIteration: - if default is _marker: - raise - return default - return self._cache[0] - - def prepend(self, *items): - """Stack up items to be the next ones returned from ``next()`` or - ``self.peek()``. The items will be returned in - first in, first out order:: - - >>> p = peekable([1, 2, 3]) - >>> p.prepend(10, 11, 12) - >>> next(p) - 10 - >>> list(p) - [11, 12, 1, 2, 3] - - It is possible, by prepending items, to "resurrect" a peekable that - previously raised ``StopIteration``. - - >>> p = peekable([]) - >>> next(p) - Traceback (most recent call last): - ... - StopIteration - >>> p.prepend(1) - >>> next(p) - 1 - >>> next(p) - Traceback (most recent call last): - ... - StopIteration - - """ - self._cache.extendleft(reversed(items)) - - def __next__(self): - if self._cache: - return self._cache.popleft() - - return next(self._it) - - def _get_slice(self, index): - # Normalize the slice's arguments - step = 1 if (index.step is None) else index.step - if step > 0: - start = 0 if (index.start is None) else index.start - stop = maxsize if (index.stop is None) else index.stop - elif step < 0: - start = -1 if (index.start is None) else index.start - stop = (-maxsize - 1) if (index.stop is None) else index.stop - else: - raise ValueError('slice step cannot be zero') - - # If either the start or stop index is negative, we'll need to cache - # the rest of the iterable in order to slice from the right side. - if (start < 0) or (stop < 0): - self._cache.extend(self._it) - # Otherwise we'll need to find the rightmost index and cache to that - # point. - else: - n = min(max(start, stop) + 1, maxsize) - cache_len = len(self._cache) - if n >= cache_len: - self._cache.extend(islice(self._it, n - cache_len)) - - return list(self._cache)[index] - - def __getitem__(self, index): - if isinstance(index, slice): - return self._get_slice(index) - - cache_len = len(self._cache) - if index < 0: - self._cache.extend(self._it) - elif index >= cache_len: - self._cache.extend(islice(self._it, index + 1 - cache_len)) - - return self._cache[index] - - -def _collate(*iterables, key=lambda a: a, reverse=False): - """Helper for ``collate()``, called when the user is using the ``reverse`` - or ``key`` keyword arguments on Python versions below 3.5. - - """ - min_or_max = partial(max if reverse else min, key=itemgetter(0)) - peekables = [peekable(it) for it in iterables] - peekables = [p for p in peekables if p] # Kill empties. - while peekables: - _, p = min_or_max((key(p.peek()), p) for p in peekables) - yield next(p) - peekables = [x for x in peekables if x] - - -def collate(*iterables, **kwargs): - """Return a sorted merge of the items from each of several already-sorted - *iterables*. - - >>> list(collate('ACDZ', 'AZ', 'JKL')) - ['A', 'A', 'C', 'D', 'J', 'K', 'L', 'Z', 'Z'] - - Works lazily, keeping only the next value from each iterable in memory. Use - :func:`collate` to, for example, perform a n-way mergesort of items that - don't fit in memory. 
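# --- editor's aside: illustrative sketch, not part of the deleted more.py ---
# On Python 3.5+ the same lazy n-way merge is available directly from the
# standard library; as noted further down in this docstring, collate() simply
# delegates to heapq.merge there.
from heapq import merge

print(list(merge('ACDZ', 'AZ', 'JKL')))
# -> ['A', 'A', 'C', 'D', 'J', 'K', 'L', 'Z', 'Z']
print(list(merge(['1', '10'], ['2', '11'], key=int)))  # key= requires 3.5+
# -> ['1', '2', '10', '11']
# --- end of editor's aside; the deleted docstring continues below ---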
- - If a *key* function is specified, the iterables will be sorted according - to its result: - - >>> key = lambda s: int(s) # Sort by numeric value, not by string - >>> list(collate(['1', '10'], ['2', '11'], key=key)) - ['1', '2', '10', '11'] - - - If the *iterables* are sorted in descending order, set *reverse* to - ``True``: - - >>> list(collate([5, 3, 1], [4, 2, 0], reverse=True)) - [5, 4, 3, 2, 1, 0] - - If the elements of the passed-in iterables are out of order, you might get - unexpected results. - - On Python 3.5+, this function is an alias for :func:`heapq.merge`. - - """ - if not kwargs: - return merge(*iterables) - - return _collate(*iterables, **kwargs) - - -# If using Python version 3.5 or greater, heapq.merge() will be faster than -# collate - use that instead. -if version_info >= (3, 5, 0): - _collate_docstring = collate.__doc__ - collate = partial(merge) - collate.__doc__ = _collate_docstring - - -def consumer(func): - """Decorator that automatically advances a PEP-342-style "reverse iterator" - to its first yield point so you don't have to call ``next()`` on it - manually. - - >>> @consumer - ... def tally(): - ... i = 0 - ... while True: - ... print('Thing number %s is %s.' % (i, (yield))) - ... i += 1 - ... - >>> t = tally() - >>> t.send('red') - Thing number 0 is red. - >>> t.send('fish') - Thing number 1 is fish. - - Without the decorator, you would have to call ``next(t)`` before - ``t.send()`` could be used. - - """ - @wraps(func) - def wrapper(*args, **kwargs): - gen = func(*args, **kwargs) - next(gen) - return gen - return wrapper - - -def ilen(iterable): - """Return the number of items in *iterable*. - - >>> ilen(x for x in range(1000000) if x % 3 == 0) - 333334 - - This consumes the iterable, so handle with care. - - """ - # This approach was selected because benchmarks showed it's likely the - # fastest of the known implementations at the time of writing. - # See GitHub tracker: #236, #230. - counter = count() - deque(zip(iterable, counter), maxlen=0) - return next(counter) - - -def iterate(func, start): - """Return ``start``, ``func(start)``, ``func(func(start))``, ... - - >>> from itertools import islice - >>> list(islice(iterate(lambda x: 2*x, 1), 10)) - [1, 2, 4, 8, 16, 32, 64, 128, 256, 512] - - """ - while True: - yield start - start = func(start) - - -def with_iter(context_manager): - """Wrap an iterable in a ``with`` statement, so it closes once exhausted. - - For example, this will close the file when the iterator is exhausted:: - - upper_lines = (line.upper() for line in with_iter(open('foo'))) - - Any context manager which returns an iterable is a candidate for - ``with_iter``. - - """ - with context_manager as iterable: - yield from iterable - - -def one(iterable, too_short=None, too_long=None): - """Return the first item from *iterable*, which is expected to contain only - that item. Raise an exception if *iterable* is empty or has more than one - item. - - :func:`one` is useful for ensuring that an iterable contains only one item. - For example, it can be used to retrieve the result of a database query - that is expected to return a single row. - - If *iterable* is empty, ``ValueError`` will be raised. You may specify a - different exception with the *too_short* keyword: - - >>> it = [] - >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... 
- ValueError: too many items in iterable (expected 1)' - >>> too_short = IndexError('too few items') - >>> one(it, too_short=too_short) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - IndexError: too few items - - Similarly, if *iterable* contains more than one item, ``ValueError`` will - be raised. You may specify a different exception with the *too_long* - keyword: - - >>> it = ['too', 'many'] - >>> one(it) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - ValueError: too many items in iterable (expected 1)' - >>> too_long = RuntimeError - >>> one(it, too_long=too_long) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - RuntimeError - - Note that :func:`one` attempts to advance *iterable* twice to ensure there - is only one item. See :func:`spy` or :func:`peekable` to check iterable - contents less destructively. - - """ - it = iter(iterable) - - try: - value = next(it) - except StopIteration: - raise too_short or ValueError('too few items in iterable (expected 1)') - - try: - next(it) - except StopIteration: - pass - else: - raise too_long or ValueError('too many items in iterable (expected 1)') - - return value - - -def distinct_permutations(iterable): - """Yield successive distinct permutations of the elements in *iterable*. - - >>> sorted(distinct_permutations([1, 0, 1])) - [(0, 1, 1), (1, 0, 1), (1, 1, 0)] - - Equivalent to ``set(permutations(iterable))``, except duplicates are not - generated and thrown away. For larger input sequences this is much more - efficient. - - Duplicate permutations arise when there are duplicated elements in the - input iterable. The number of items returned is - `n! / (x_1! * x_2! * ... * x_n!)`, where `n` is the total number of - items input, and each `x_i` is the count of a distinct item in the input - sequence. - - """ - def make_new_permutations(pool, e): - """Internal helper function. - The output permutations are built up by adding element *e* to the - current *permutations* at every possible position. - The key idea is to keep repeated elements (reverse) ordered: - if e1 == e2 and e1 is before e2 in the iterable, then all permutations - with e1 before e2 are ignored. - - """ - for perm in pool: - for j in range(len(perm)): - yield perm[:j] + (e,) + perm[j:] - if perm[j] == e: - break - else: - yield perm + (e,) - - permutations = [()] - for e in iterable: - permutations = make_new_permutations(permutations, e) - - return (tuple(t) for t in permutations) - - -def intersperse(e, iterable, n=1): - """Intersperse filler element *e* among the items in *iterable*, leaving - *n* items between each filler element. - - >>> list(intersperse('!', [1, 2, 3, 4, 5])) - [1, '!', 2, '!', 3, '!', 4, '!', 5] - - >>> list(intersperse(None, [1, 2, 3, 4, 5], n=2)) - [1, 2, None, 3, 4, None, 5] - - """ - if n == 0: - raise ValueError('n must be > 0') - elif n == 1: - # interleave(repeat(e), iterable) -> e, x_0, e, e, x_1, e, x_2... - # islice(..., 1, None) -> x_0, e, e, x_1, e, x_2... - return islice(interleave(repeat(e), iterable), 1, None) - else: - # interleave(filler, chunks) -> [e], [x_0, x_1], [e], [x_2, x_3]... - # islice(..., 1, None) -> [x_0, x_1], [e], [x_2, x_3]... - # flatten(...) -> x_0, x_1, e, x_2, x_3... - filler = repeat([e]) - chunks = chunked(iterable, n) - return flatten(islice(interleave(filler, chunks), 1, None)) - - -def unique_to_each(*iterables): - """Return the elements from each of the input iterables that aren't in the - other input iterables. 
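# --- editor's aside: illustrative sketch, not part of the deleted more.py ---
# For inputs that are already plain sets, the same question ("which elements
# appear in exactly one input?") reduces to set algebra; unique_to_each()
# generalises it to arbitrary iterables while preserving order and duplicates.
sets = [{'A', 'B'}, {'B', 'C'}, {'B', 'D'}]
print([sorted(s.difference(*(o for o in sets if o is not s))) for s in sets])
# -> [['A'], ['C'], ['D']]
# --- end of editor's aside; the deleted docstring continues below ---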
- - For example, suppose you have a set of packages, each with a set of - dependencies:: - - {'pkg_1': {'A', 'B'}, 'pkg_2': {'B', 'C'}, 'pkg_3': {'B', 'D'}} - - If you remove one package, which dependencies can also be removed? - - If ``pkg_1`` is removed, then ``A`` is no longer necessary - it is not - associated with ``pkg_2`` or ``pkg_3``. Similarly, ``C`` is only needed for - ``pkg_2``, and ``D`` is only needed for ``pkg_3``:: - - >>> unique_to_each({'A', 'B'}, {'B', 'C'}, {'B', 'D'}) - [['A'], ['C'], ['D']] - - If there are duplicates in one input iterable that aren't in the others - they will be duplicated in the output. Input order is preserved:: - - >>> unique_to_each("mississippi", "missouri") - [['p', 'p'], ['o', 'u', 'r']] - - It is assumed that the elements of each iterable are hashable. - - """ - pool = [list(it) for it in iterables] - counts = Counter(chain.from_iterable(map(set, pool))) - uniques = {element for element in counts if counts[element] == 1} - return [list(filter(uniques.__contains__, it)) for it in pool] - - -def windowed(seq, n, fillvalue=None, step=1): - """Return a sliding window of width *n* over the given iterable. - - >>> all_windows = windowed([1, 2, 3, 4, 5], 3) - >>> list(all_windows) - [(1, 2, 3), (2, 3, 4), (3, 4, 5)] - - When the window is larger than the iterable, *fillvalue* is used in place - of missing values:: - - >>> list(windowed([1, 2, 3], 4)) - [(1, 2, 3, None)] - - Each window will advance in increments of *step*: - - >>> list(windowed([1, 2, 3, 4, 5, 6], 3, fillvalue='!', step=2)) - [(1, 2, 3), (3, 4, 5), (5, 6, '!')] - - To slide into the iterable's items, use :func:`chain` to add filler items - to the left: - - >>> iterable = [1, 2, 3, 4] - >>> n = 3 - >>> padding = [None] * (n - 1) - >>> list(windowed(chain(padding, iterable), 3)) - [(None, None, 1), (None, 1, 2), (1, 2, 3), (2, 3, 4)] - - """ - if n < 0: - raise ValueError('n must be >= 0') - if n == 0: - yield tuple() - return - if step < 1: - raise ValueError('step must be >= 1') - - it = iter(seq) - window = deque([], n) - append = window.append - - # Initial deque fill - for _ in range(n): - append(next(it, fillvalue)) - yield tuple(window) - - # Appending new items to the right causes old items to fall off the left - i = 0 - for item in it: - append(item) - i = (i + 1) % step - if i % step == 0: - yield tuple(window) - - # If there are items from the iterable in the window, pad with the given - # value and emit them. - if (i % step) and (step - i < n): - for _ in range(step - i): - append(fillvalue) - yield tuple(window) - - -def substrings(iterable): - """Yield all of the substrings of *iterable*. - - >>> [''.join(s) for s in substrings('more')] - ['m', 'o', 'r', 'e', 'mo', 'or', 're', 'mor', 'ore', 'more'] - - Note that non-string iterables can also be subdivided. - - >>> list(substrings([0, 1, 2])) - [(0,), (1,), (2,), (0, 1), (1, 2), (0, 1, 2)] - - """ - # The length-1 substrings - seq = [] - for item in iter(iterable): - seq.append(item) - yield (item,) - seq = tuple(seq) - item_count = len(seq) - - # And the rest - for n in range(2, item_count + 1): - for i in range(item_count - n + 1): - yield seq[i:i + n] - - -def substrings_indexes(seq, reverse=False): - """Yield all substrings and their positions in *seq* - - The items yielded will be a tuple of the form ``(substr, i, j)``, where - ``substr == seq[i:j]``. - - This function only works for iterables that support slicing, such as - ``str`` objects. - - >>> for item in substrings_indexes('more'): - ... 
print(item) - ('m', 0, 1) - ('o', 1, 2) - ('r', 2, 3) - ('e', 3, 4) - ('mo', 0, 2) - ('or', 1, 3) - ('re', 2, 4) - ('mor', 0, 3) - ('ore', 1, 4) - ('more', 0, 4) - - Set *reverse* to ``True`` to yield the same items in the opposite order. - - - """ - r = range(1, len(seq) + 1) - if reverse: - r = reversed(r) - return ( - (seq[i:i + L], i, i + L) for L in r for i in range(len(seq) - L + 1) - ) - - -class bucket: - """Wrap *iterable* and return an object that buckets it iterable into - child iterables based on a *key* function. - - >>> iterable = ['a1', 'b1', 'c1', 'a2', 'b2', 'c2', 'b3'] - >>> s = bucket(iterable, key=lambda x: x[0]) - >>> a_iterable = s['a'] - >>> next(a_iterable) - 'a1' - >>> next(a_iterable) - 'a2' - >>> list(s['b']) - ['b1', 'b2', 'b3'] - - The original iterable will be advanced and its items will be cached until - they are used by the child iterables. This may require significant storage. - - By default, attempting to select a bucket to which no items belong will - exhaust the iterable and cache all values. - If you specify a *validator* function, selected buckets will instead be - checked against it. - - >>> from itertools import count - >>> it = count(1, 2) # Infinite sequence of odd numbers - >>> key = lambda x: x % 10 # Bucket by last digit - >>> validator = lambda x: x in {1, 3, 5, 7, 9} # Odd digits only - >>> s = bucket(it, key=key, validator=validator) - >>> 2 in s - False - >>> list(s[2]) - [] - - """ - def __init__(self, iterable, key, validator=None): - self._it = iter(iterable) - self._key = key - self._cache = defaultdict(deque) - self._validator = validator or (lambda x: True) - - def __contains__(self, value): - if not self._validator(value): - return False - - try: - item = next(self[value]) - except StopIteration: - return False - else: - self._cache[value].appendleft(item) - - return True - - def _get_values(self, value): - """ - Helper to yield items from the parent iterator that match *value*. - Items that don't match are stored in the local cache as they - are encountered. - """ - while True: - # If we've cached some items that match the target value, emit - # the first one and evict it from the cache. - if self._cache[value]: - yield self._cache[value].popleft() - # Otherwise we need to advance the parent iterator to search for - # a matching item, caching the rest. - else: - while True: - try: - item = next(self._it) - except StopIteration: - return - item_value = self._key(item) - if item_value == value: - yield item - break - elif self._validator(item_value): - self._cache[item_value].append(item) - - def __getitem__(self, value): - if not self._validator(value): - return iter(()) - - return self._get_values(value) - - -def spy(iterable, n=1): - """Return a 2-tuple with a list containing the first *n* elements of - *iterable*, and an iterator with the same items as *iterable*. - This allows you to "look ahead" at the items in the iterable without - advancing it. 
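# --- editor's aside: illustrative sketch, not part of the deleted more.py ---
# The heart of the lookahead trick is "pull n items, then chain them back onto
# the front"; spy_sketch is an editor-invented name mirroring the documented
# behaviour, built only on itertools.
from itertools import chain, islice

def spy_sketch(iterable, n=1):
    it = iter(iterable)
    head = list(islice(it, n))
    return head, chain(head, it)

head, rest = spy_sketch('abcdefg', 3)
print(head)        # ['a', 'b', 'c']
print(list(rest))  # ['a', 'b', 'c', 'd', 'e', 'f', 'g']
# --- end of editor's aside; the deleted docstring continues below ---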
- - There is one item in the list by default: - - >>> iterable = 'abcdefg' - >>> head, iterable = spy(iterable) - >>> head - ['a'] - >>> list(iterable) - ['a', 'b', 'c', 'd', 'e', 'f', 'g'] - - You may use unpacking to retrieve items instead of lists: - - >>> (head,), iterable = spy('abcdefg') - >>> head - 'a' - >>> (first, second), iterable = spy('abcdefg', 2) - >>> first - 'a' - >>> second - 'b' - - The number of items requested can be larger than the number of items in - the iterable: - - >>> iterable = [1, 2, 3, 4, 5] - >>> head, iterable = spy(iterable, 10) - >>> head - [1, 2, 3, 4, 5] - >>> list(iterable) - [1, 2, 3, 4, 5] - - """ - it = iter(iterable) - head = take(n, it) - - return head, chain(head, it) - - -def interleave(*iterables): - """Return a new iterable yielding from each iterable in turn, - until the shortest is exhausted. - - >>> list(interleave([1, 2, 3], [4, 5], [6, 7, 8])) - [1, 4, 6, 2, 5, 7] - - For a version that doesn't terminate after the shortest iterable is - exhausted, see :func:`interleave_longest`. - - """ - return chain.from_iterable(zip(*iterables)) - - -def interleave_longest(*iterables): - """Return a new iterable yielding from each iterable in turn, - skipping any that are exhausted. - - >>> list(interleave_longest([1, 2, 3], [4, 5], [6, 7, 8])) - [1, 4, 6, 2, 5, 7, 3, 8] - - This function produces the same output as :func:`roundrobin`, but may - perform better for some inputs (in particular when the number of iterables - is large). - - """ - i = chain.from_iterable(zip_longest(*iterables, fillvalue=_marker)) - return (x for x in i if x is not _marker) - - -def collapse(iterable, base_type=None, levels=None): - """Flatten an iterable with multiple levels of nesting (e.g., a list of - lists of tuples) into non-iterable types. - - >>> iterable = [(1, 2), ([3, 4], [[5], [6]])] - >>> list(collapse(iterable)) - [1, 2, 3, 4, 5, 6] - - Binary and text strings are not considered iterable and - will not be collapsed. - - To avoid collapsing other types, specify *base_type*: - - >>> iterable = ['ab', ('cd', 'ef'), ['gh', 'ij']] - >>> list(collapse(iterable, base_type=tuple)) - ['ab', ('cd', 'ef'), 'gh', 'ij'] - - Specify *levels* to stop flattening after a certain level: - - >>> iterable = [('a', ['b']), ('c', ['d'])] - >>> list(collapse(iterable)) # Fully flattened - ['a', 'b', 'c', 'd'] - >>> list(collapse(iterable, levels=1)) # Only one level flattened - ['a', ['b'], 'c', ['d']] - - """ - def walk(node, level): - if ( - ((levels is not None) and (level > levels)) or - isinstance(node, (str, bytes)) or - ((base_type is not None) and isinstance(node, base_type)) - ): - yield node - return - - try: - tree = iter(node) - except TypeError: - yield node - return - else: - for child in tree: - for x in walk(child, level + 1): - yield x - - yield from walk(iterable, 0) - - -def side_effect(func, iterable, chunk_size=None, before=None, after=None): - """Invoke *func* on each item in *iterable* (or on each *chunk_size* group - of items) before yielding the item. - - `func` must be a function that takes a single argument. Its return value - will be discarded. - - *before* and *after* are optional functions that take no arguments. They - will be executed before iteration starts and after it ends, respectively. - - `side_effect` can be used for logging, updating progress bars, or anything - that is not functionally "pure." 
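# --- editor's aside: illustrative sketch, not part of the deleted more.py ---
# A typical "impure" use is counting items as they stream past; this assumes
# more_itertools itself is importable (it is what the surrounding file provides).
from more_itertools import consume, side_effect

seen = []
consume(side_effect(seen.append, range(100)))
print(len(seen))  # 100 -- every item was observed by seen.append while streaming past
# --- end of editor's aside; the deleted docstring continues below ---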
- - Emitting a status message: - - >>> from more_itertools import consume - >>> func = lambda item: print('Received {}'.format(item)) - >>> consume(side_effect(func, range(2))) - Received 0 - Received 1 - - Operating on chunks of items: - - >>> pair_sums = [] - >>> func = lambda chunk: pair_sums.append(sum(chunk)) - >>> list(side_effect(func, [0, 1, 2, 3, 4, 5], 2)) - [0, 1, 2, 3, 4, 5] - >>> list(pair_sums) - [1, 5, 9] - - Writing to a file-like object: - - >>> from io import StringIO - >>> from more_itertools import consume - >>> f = StringIO() - >>> func = lambda x: print(x, file=f) - >>> before = lambda: print(u'HEADER', file=f) - >>> after = f.close - >>> it = [u'a', u'b', u'c'] - >>> consume(side_effect(func, it, before=before, after=after)) - >>> f.closed - True - - """ - try: - if before is not None: - before() - - if chunk_size is None: - for item in iterable: - func(item) - yield item - else: - for chunk in chunked(iterable, chunk_size): - func(chunk) - yield from chunk - finally: - if after is not None: - after() - - -def sliced(seq, n): - """Yield slices of length *n* from the sequence *seq*. - - >>> list(sliced((1, 2, 3, 4, 5, 6), 3)) - [(1, 2, 3), (4, 5, 6)] - - If the length of the sequence is not divisible by the requested slice - length, the last slice will be shorter. - - >>> list(sliced((1, 2, 3, 4, 5, 6, 7, 8), 3)) - [(1, 2, 3), (4, 5, 6), (7, 8)] - - This function will only work for iterables that support slicing. - For non-sliceable iterables, see :func:`chunked`. - - """ - return takewhile(bool, (seq[i: i + n] for i in count(0, n))) - - -def split_at(iterable, pred): - """Yield lists of items from *iterable*, where each list is delimited by - an item where callable *pred* returns ``True``. The lists do not include - the delimiting items. - - >>> list(split_at('abcdcba', lambda x: x == 'b')) - [['a'], ['c', 'd', 'c'], ['a']] - - >>> list(split_at(range(10), lambda n: n % 2 == 1)) - [[0], [2], [4], [6], [8], []] - """ - buf = [] - for item in iterable: - if pred(item): - yield buf - buf = [] - else: - buf.append(item) - yield buf - - -def split_before(iterable, pred): - """Yield lists of items from *iterable*, where each list starts with an - item where callable *pred* returns ``True``: - - >>> list(split_before('OneTwo', lambda s: s.isupper())) - [['O', 'n', 'e'], ['T', 'w', 'o']] - - >>> list(split_before(range(10), lambda n: n % 3 == 0)) - [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]] - - """ - buf = [] - for item in iterable: - if pred(item) and buf: - yield buf - buf = [] - buf.append(item) - yield buf - - -def split_after(iterable, pred): - """Yield lists of items from *iterable*, where each list ends with an - item where callable *pred* returns ``True``: - - >>> list(split_after('one1two2', lambda s: s.isdigit())) - [['o', 'n', 'e', '1'], ['t', 'w', 'o', '2']] - - >>> list(split_after(range(10), lambda n: n % 3 == 0)) - [[0], [1, 2, 3], [4, 5, 6], [7, 8, 9]] - - """ - buf = [] - for item in iterable: - buf.append(item) - if pred(item) and buf: - yield buf - buf = [] - if buf: - yield buf - - -def split_into(iterable, sizes): - """Yield a list of sequential items from *iterable* of length 'n' for each - integer 'n' in *sizes*. - - >>> list(split_into([1,2,3,4,5,6], [1,2,3])) - [[1], [2, 3], [4, 5, 6]] - - If the sum of *sizes* is smaller than the length of *iterable*, then the - remaining items of *iterable* will not be returned. 
- - >>> list(split_into([1,2,3,4,5,6], [2,3])) - [[1, 2], [3, 4, 5]] - - If the sum of *sizes* is larger than the length of *iterable*, fewer items - will be returned in the iteration that overruns *iterable* and further - lists will be empty: - - >>> list(split_into([1,2,3,4], [1,2,3,4])) - [[1], [2, 3], [4], []] - - When a ``None`` object is encountered in *sizes*, the returned list will - contain items up to the end of *iterable* the same way that itertools.slice - does: - - >>> list(split_into([1,2,3,4,5,6,7,8,9,0], [2,3,None])) - [[1, 2], [3, 4, 5], [6, 7, 8, 9, 0]] - - :func:`split_into` can be useful for grouping a series of items where the - sizes of the groups are not uniform. An example would be where in a row - from a table, multiple columns represent elements of the same feature - (e.g. a point represented by x,y,z) but, the format is not the same for - all columns. - """ - # convert the iterable argument into an iterator so its contents can - # be consumed by islice in case it is a generator - it = iter(iterable) - - for size in sizes: - if size is None: - yield list(it) - return - else: - yield list(islice(it, size)) - - -def padded(iterable, fillvalue=None, n=None, next_multiple=False): - """Yield the elements from *iterable*, followed by *fillvalue*, such that - at least *n* items are emitted. - - >>> list(padded([1, 2, 3], '?', 5)) - [1, 2, 3, '?', '?'] - - If *next_multiple* is ``True``, *fillvalue* will be emitted until the - number of items emitted is a multiple of *n*:: - - >>> list(padded([1, 2, 3, 4], n=3, next_multiple=True)) - [1, 2, 3, 4, None, None] - - If *n* is ``None``, *fillvalue* will be emitted indefinitely. - - """ - it = iter(iterable) - if n is None: - yield from chain(it, repeat(fillvalue)) - elif n < 1: - raise ValueError('n must be at least 1') - else: - item_count = 0 - for item in it: - yield item - item_count += 1 - - remaining = (n - item_count) % n if next_multiple else n - item_count - for _ in range(remaining): - yield fillvalue - - -def distribute(n, iterable): - """Distribute the items from *iterable* among *n* smaller iterables. - - >>> group_1, group_2 = distribute(2, [1, 2, 3, 4, 5, 6]) - >>> list(group_1) - [1, 3, 5] - >>> list(group_2) - [2, 4, 6] - - If the length of *iterable* is not evenly divisible by *n*, then the - length of the returned iterables will not be identical: - - >>> children = distribute(3, [1, 2, 3, 4, 5, 6, 7]) - >>> [list(c) for c in children] - [[1, 4, 7], [2, 5], [3, 6]] - - If the length of *iterable* is smaller than *n*, then the last returned - iterables will be empty: - - >>> children = distribute(5, [1, 2, 3]) - >>> [list(c) for c in children] - [[1], [2], [3], [], []] - - This function uses :func:`itertools.tee` and may require significant - storage. If you need the order items in the smaller iterables to match the - original iterable, see :func:`divide`. - - """ - if n < 1: - raise ValueError('n must be at least 1') - - children = tee(iterable, n) - return [islice(it, index, None, n) for index, it in enumerate(children)] - - -def stagger(iterable, offsets=(-1, 0, 1), longest=False, fillvalue=None): - """Yield tuples whose elements are offset from *iterable*. - The amount by which the `i`-th item in each tuple is offset is given by - the `i`-th item in *offsets*. 
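# --- editor's aside: illustrative sketch, not part of the deleted more.py ---
# With the default offsets (-1, 0, 1), the output is equivalent to zipping
# three shifted copies of the data, padding the copy shifted into the past:
data = [0, 1, 2, 3]
print(list(zip([None] + data, data, data[1:])))
# -> [(None, 0, 1), (0, 1, 2), (1, 2, 3)]
# --- end of editor's aside; the deleted docstring continues below ---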
- - >>> list(stagger([0, 1, 2, 3])) - [(None, 0, 1), (0, 1, 2), (1, 2, 3)] - >>> list(stagger(range(8), offsets=(0, 2, 4))) - [(0, 2, 4), (1, 3, 5), (2, 4, 6), (3, 5, 7)] - - By default, the sequence will end when the final element of a tuple is the - last item in the iterable. To continue until the first element of a tuple - is the last item in the iterable, set *longest* to ``True``:: - - >>> list(stagger([0, 1, 2, 3], longest=True)) - [(None, 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, None), (3, None, None)] - - By default, ``None`` will be used to replace offsets beyond the end of the - sequence. Specify *fillvalue* to use some other value. - - """ - children = tee(iterable, len(offsets)) - - return zip_offset( - *children, offsets=offsets, longest=longest, fillvalue=fillvalue - ) - - -def zip_offset(*iterables, offsets, longest=False, fillvalue=None): - """``zip`` the input *iterables* together, but offset the `i`-th iterable - by the `i`-th item in *offsets*. - - >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1))) - [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e')] - - This can be used as a lightweight alternative to SciPy or pandas to analyze - data sets in which some series have a lead or lag relationship. - - By default, the sequence will end when the shortest iterable is exhausted. - To continue until the longest iterable is exhausted, set *longest* to - ``True``. - - >>> list(zip_offset('0123', 'abcdef', offsets=(0, 1), longest=True)) - [('0', 'b'), ('1', 'c'), ('2', 'd'), ('3', 'e'), (None, 'f')] - - By default, ``None`` will be used to replace offsets beyond the end of the - sequence. Specify *fillvalue* to use some other value. - - """ - if len(iterables) != len(offsets): - raise ValueError("Number of iterables and offsets didn't match") - - staggered = [] - for it, n in zip(iterables, offsets): - if n < 0: - staggered.append(chain(repeat(fillvalue, -n), it)) - elif n > 0: - staggered.append(islice(it, n, None)) - else: - staggered.append(it) - - if longest: - return zip_longest(*staggered, fillvalue=fillvalue) - - return zip(*staggered) - - -def sort_together(iterables, key_list=(0,), reverse=False): - """Return the input iterables sorted together, with *key_list* as the - priority for sorting. All iterables are trimmed to the length of the - shortest one. - - This can be used like the sorting function in a spreadsheet. If each - iterable represents a column of data, the key list determines which - columns are used for sorting. - - By default, all iterables are sorted using the ``0``-th iterable:: - - >>> iterables = [(4, 3, 2, 1), ('a', 'b', 'c', 'd')] - >>> sort_together(iterables) - [(1, 2, 3, 4), ('d', 'c', 'b', 'a')] - - Set a different key list to sort according to another iterable. - Specifying multiple keys dictates how ties are broken:: - - >>> iterables = [(3, 1, 2), (0, 1, 0), ('c', 'b', 'a')] - >>> sort_together(iterables, key_list=(1, 2)) - [(2, 3, 1), (0, 0, 1), ('a', 'c', 'b')] - - Set *reverse* to ``True`` to sort in descending order. - - >>> sort_together([(1, 2, 3), ('c', 'b', 'a')], reverse=True) - [(3, 2, 1), ('a', 'b', 'c')] - - """ - return list(zip(*sorted(zip(*iterables), - key=itemgetter(*key_list), - reverse=reverse))) - - -def unzip(iterable): - """The inverse of :func:`zip`, this function disaggregates the elements - of the zipped *iterable*. - - The ``i``-th iterable contains the ``i``-th element from each element - of the zipped iterable. The first element is used to to determine the - length of the remaining elements. 
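# --- editor's aside: illustrative sketch, not part of the deleted more.py ---
# For small, fully materialised data the eager equivalent is just zip(*...);
# unzip() gives the same split lazily, at the cost of tee()'s internal buffering.
pairs = [('a', 1), ('b', 2), ('c', 3), ('d', 4)]
letters, numbers = zip(*pairs)
print(letters)  # ('a', 'b', 'c', 'd')
print(numbers)  # (1, 2, 3, 4)
# --- end of editor's aside; the deleted docstring continues below ---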
- - >>> iterable = [('a', 1), ('b', 2), ('c', 3), ('d', 4)] - >>> letters, numbers = unzip(iterable) - >>> list(letters) - ['a', 'b', 'c', 'd'] - >>> list(numbers) - [1, 2, 3, 4] - - This is similar to using ``zip(*iterable)``, but it avoids reading - *iterable* into memory. Note, however, that this function uses - :func:`itertools.tee` and thus may require significant storage. - - """ - head, iterable = spy(iter(iterable)) - if not head: - # empty iterable, e.g. zip([], [], []) - return () - # spy returns a one-length iterable as head - head = head[0] - iterables = tee(iterable, len(head)) - - def itemgetter(i): - def getter(obj): - try: - return obj[i] - except IndexError: - # basically if we have an iterable like - # iter([(1, 2, 3), (4, 5), (6,)]) - # the second unzipped iterable would fail at the third tuple - # since it would try to access tup[1] - # same with the third unzipped iterable and the second tuple - # to support these "improperly zipped" iterables, - # we create a custom itemgetter - # which just stops the unzipped iterables - # at first length mismatch - raise StopIteration - return getter - - return tuple(map(itemgetter(i), it) for i, it in enumerate(iterables)) - - -def divide(n, iterable): - """Divide the elements from *iterable* into *n* parts, maintaining - order. - - >>> group_1, group_2 = divide(2, [1, 2, 3, 4, 5, 6]) - >>> list(group_1) - [1, 2, 3] - >>> list(group_2) - [4, 5, 6] - - If the length of *iterable* is not evenly divisible by *n*, then the - length of the returned iterables will not be identical: - - >>> children = divide(3, [1, 2, 3, 4, 5, 6, 7]) - >>> [list(c) for c in children] - [[1, 2, 3], [4, 5], [6, 7]] - - If the length of the iterable is smaller than n, then the last returned - iterables will be empty: - - >>> children = divide(5, [1, 2, 3]) - >>> [list(c) for c in children] - [[1], [2], [3], [], []] - - This function will exhaust the iterable before returning and may require - significant storage. If order is not important, see :func:`distribute`, - which does not first pull the iterable into memory. - - """ - if n < 1: - raise ValueError('n must be at least 1') - - seq = tuple(iterable) - q, r = divmod(len(seq), n) - - ret = [] - for i in range(n): - start = (i * q) + (i if i < r else r) - stop = ((i + 1) * q) + (i + 1 if i + 1 < r else r) - ret.append(iter(seq[start:stop])) - - return ret - - -def always_iterable(obj, base_type=(str, bytes)): - """If *obj* is iterable, return an iterator over its items:: - - >>> obj = (1, 2, 3) - >>> list(always_iterable(obj)) - [1, 2, 3] - - If *obj* is not iterable, return a one-item iterable containing *obj*:: - - >>> obj = 1 - >>> list(always_iterable(obj)) - [1] - - If *obj* is ``None``, return an empty iterable: - - >>> obj = None - >>> list(always_iterable(None)) - [] - - By default, binary and text strings are not considered iterable:: - - >>> obj = 'foo' - >>> list(always_iterable(obj)) - ['foo'] - - If *base_type* is set, objects for which ``isinstance(obj, base_type)`` - returns ``True`` won't be considered iterable. 
- - >>> obj = {'a': 1} - >>> list(always_iterable(obj)) # Iterate over the dict's keys - ['a'] - >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit - [{'a': 1}] - - Set *base_type* to ``None`` to avoid any special handling and treat objects - Python considers iterable as iterable: - - >>> obj = 'foo' - >>> list(always_iterable(obj, base_type=None)) - ['f', 'o', 'o'] - """ - if obj is None: - return iter(()) - - if (base_type is not None) and isinstance(obj, base_type): - return iter((obj,)) - - try: - return iter(obj) - except TypeError: - return iter((obj,)) - - -def adjacent(predicate, iterable, distance=1): - """Return an iterable over `(bool, item)` tuples where the `item` is - drawn from *iterable* and the `bool` indicates whether - that item satisfies the *predicate* or is adjacent to an item that does. - - For example, to find whether items are adjacent to a ``3``:: - - >>> list(adjacent(lambda x: x == 3, range(6))) - [(False, 0), (False, 1), (True, 2), (True, 3), (True, 4), (False, 5)] - - Set *distance* to change what counts as adjacent. For example, to find - whether items are two places away from a ``3``: - - >>> list(adjacent(lambda x: x == 3, range(6), distance=2)) - [(False, 0), (True, 1), (True, 2), (True, 3), (True, 4), (True, 5)] - - This is useful for contextualizing the results of a search function. - For example, a code comparison tool might want to identify lines that - have changed, but also surrounding lines to give the viewer of the diff - context. - - The predicate function will only be called once for each item in the - iterable. - - See also :func:`groupby_transform`, which can be used with this function - to group ranges of items with the same `bool` value. - - """ - # Allow distance=0 mainly for testing that it reproduces results with map() - if distance < 0: - raise ValueError('distance must be at least 0') - - i1, i2 = tee(iterable) - padding = [False] * distance - selected = chain(padding, map(predicate, i1), padding) - adjacent_to_selected = map(any, windowed(selected, 2 * distance + 1)) - return zip(adjacent_to_selected, i2) - - -def groupby_transform(iterable, keyfunc=None, valuefunc=None): - """An extension of :func:`itertools.groupby` that transforms the values of - *iterable* after grouping them. - *keyfunc* is a function used to compute a grouping key for each item. - *valuefunc* is a function for transforming the items after grouping. - - >>> iterable = 'AaaABbBCcA' - >>> keyfunc = lambda x: x.upper() - >>> valuefunc = lambda x: x.lower() - >>> grouper = groupby_transform(iterable, keyfunc, valuefunc) - >>> [(k, ''.join(g)) for k, g in grouper] - [('A', 'aaaa'), ('B', 'bbb'), ('C', 'cc'), ('A', 'a')] - - *keyfunc* and *valuefunc* default to identity functions if they are not - specified. - - :func:`groupby_transform` is useful when grouping elements of an iterable - using a separate iterable as the key. To do this, :func:`zip` the iterables - and pass a *keyfunc* that extracts the first element and a *valuefunc* - that extracts the second element:: - - >>> from operator import itemgetter - >>> keys = [0, 0, 1, 1, 1, 2, 2, 2, 3] - >>> values = 'abcdefghi' - >>> iterable = zip(keys, values) - >>> grouper = groupby_transform(iterable, itemgetter(0), itemgetter(1)) - >>> [(k, ''.join(g)) for k, g in grouper] - [(0, 'ab'), (1, 'cde'), (2, 'fgh'), (3, 'i')] - - Note that the order of items in the iterable is significant. 
- Only adjacent items are grouped together, so if you don't want any - duplicate groups, you should sort the iterable by the key function. - - """ - valuefunc = (lambda x: x) if valuefunc is None else valuefunc - return ((k, map(valuefunc, g)) for k, g in groupby(iterable, keyfunc)) - - -def numeric_range(*args): - """An extension of the built-in ``range()`` function whose arguments can - be any orderable numeric type. - - With only *stop* specified, *start* defaults to ``0`` and *step* - defaults to ``1``. The output items will match the type of *stop*: - - >>> list(numeric_range(3.5)) - [0.0, 1.0, 2.0, 3.0] - - With only *start* and *stop* specified, *step* defaults to ``1``. The - output items will match the type of *start*: - - >>> from decimal import Decimal - >>> start = Decimal('2.1') - >>> stop = Decimal('5.1') - >>> list(numeric_range(start, stop)) - [Decimal('2.1'), Decimal('3.1'), Decimal('4.1')] - - With *start*, *stop*, and *step* specified the output items will match - the type of ``start + step``: - - >>> from fractions import Fraction - >>> start = Fraction(1, 2) # Start at 1/2 - >>> stop = Fraction(5, 2) # End at 5/2 - >>> step = Fraction(1, 2) # Count by 1/2 - >>> list(numeric_range(start, stop, step)) - [Fraction(1, 2), Fraction(1, 1), Fraction(3, 2), Fraction(2, 1)] - - If *step* is zero, ``ValueError`` is raised. Negative steps are supported: - - >>> list(numeric_range(3, -1, -1.0)) - [3.0, 2.0, 1.0, 0.0] - - Be aware of the limitations of floating point numbers; the representation - of the yielded numbers may be surprising. - - ``datetime.datetime`` objects can be used for *start* and *stop*, if *step* - is a ``datetime.timedelta`` object: - - >>> import datetime - >>> start = datetime.datetime(2019, 1, 1) - >>> stop = datetime.datetime(2019, 1, 3) - >>> step = datetime.timedelta(days=1) - >>> items = numeric_range(start, stop, step) - >>> next(items) - datetime.datetime(2019, 1, 1, 0, 0) - >>> next(items) - datetime.datetime(2019, 1, 2, 0, 0) - - """ - argc = len(args) - if argc == 1: - stop, = args - start = type(stop)(0) - step = 1 - elif argc == 2: - start, stop = args - step = 1 - elif argc == 3: - start, stop, step = args - else: - err_msg = 'numeric_range takes at most 3 arguments, got {}' - raise TypeError(err_msg.format(argc)) - - values = (start + (step * n) for n in count()) - zero = type(step)(0) - if step > zero: - return takewhile(partial(gt, stop), values) - elif step < zero: - return takewhile(partial(lt, stop), values) - else: - raise ValueError('numeric_range arg 3 must not be zero') - - -def count_cycle(iterable, n=None): - """Cycle through the items from *iterable* up to *n* times, yielding - the number of completed cycles along with each item. If *n* is omitted the - process repeats indefinitely. - - >>> list(count_cycle('AB', 3)) - [(0, 'A'), (0, 'B'), (1, 'A'), (1, 'B'), (2, 'A'), (2, 'B')] - - """ - iterable = tuple(iterable) - if not iterable: - return iter(()) - counter = count() if n is None else range(n) - return ((i, item) for i in counter for item in iterable) - - -def locate(iterable, pred=bool, window_size=None): - """Yield the index of each item in *iterable* for which *pred* returns - ``True``. - - *pred* defaults to :func:`bool`, which will select truthy items: - - >>> list(locate([0, 1, 1, 0, 1, 0, 0])) - [1, 2, 4] - - Set *pred* to a custom function to, e.g., find the indexes for a particular - item. 
- - >>> list(locate(['a', 'b', 'c', 'b'], lambda x: x == 'b')) - [1, 3] - - If *window_size* is given, then the *pred* function will be called with - that many items. This enables searching for sub-sequences: - - >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3] - >>> pred = lambda *args: args == (1, 2, 3) - >>> list(locate(iterable, pred=pred, window_size=3)) - [1, 5, 9] - - Use with :func:`seekable` to find indexes and then retrieve the associated - items: - - >>> from itertools import count - >>> from more_itertools import seekable - >>> source = (3 * n + 1 if (n % 2) else n // 2 for n in count()) - >>> it = seekable(source) - >>> pred = lambda x: x > 100 - >>> indexes = locate(it, pred=pred) - >>> i = next(indexes) - >>> it.seek(i) - >>> next(it) - 106 - - """ - if window_size is None: - return compress(count(), map(pred, iterable)) - - if window_size < 1: - raise ValueError('window size must be at least 1') - - it = windowed(iterable, window_size, fillvalue=_marker) - return compress(count(), starmap(pred, it)) - - -def lstrip(iterable, pred): - """Yield the items from *iterable*, but strip any from the beginning - for which *pred* returns ``True``. - - For example, to remove a set of items from the start of an iterable: - - >>> iterable = (None, False, None, 1, 2, None, 3, False, None) - >>> pred = lambda x: x in {None, False, ''} - >>> list(lstrip(iterable, pred)) - [1, 2, None, 3, False, None] - - This function is analogous to to :func:`str.lstrip`, and is essentially - an wrapper for :func:`itertools.dropwhile`. - - """ - return dropwhile(pred, iterable) - - -def rstrip(iterable, pred): - """Yield the items from *iterable*, but strip any from the end - for which *pred* returns ``True``. - - For example, to remove a set of items from the end of an iterable: - - >>> iterable = (None, False, None, 1, 2, None, 3, False, None) - >>> pred = lambda x: x in {None, False, ''} - >>> list(rstrip(iterable, pred)) - [None, False, None, 1, 2, None, 3] - - This function is analogous to :func:`str.rstrip`. - - """ - cache = [] - cache_append = cache.append - cache_clear = cache.clear - for x in iterable: - if pred(x): - cache_append(x) - else: - yield from cache - cache_clear() - yield x - - -def strip(iterable, pred): - """Yield the items from *iterable*, but strip any from the - beginning and end for which *pred* returns ``True``. - - For example, to remove a set of items from both ends of an iterable: - - >>> iterable = (None, False, None, 1, 2, None, 3, False, None) - >>> pred = lambda x: x in {None, False, ''} - >>> list(strip(iterable, pred)) - [1, 2, None, 3] - - This function is analogous to :func:`str.strip`. - - """ - return rstrip(lstrip(iterable, pred), pred) - - -def islice_extended(iterable, *args): - """An extension of :func:`itertools.islice` that supports negative values - for *stop*, *start*, and *step*. - - >>> iterable = iter('abcdefgh') - >>> list(islice_extended(iterable, -4, -1)) - ['e', 'f', 'g'] - - Slices with negative values require some caching of *iterable*, but this - function takes care to minimize the amount of memory required. 
- - For example, you can use a negative step with an infinite iterator: - - >>> from itertools import count - >>> list(islice_extended(count(), 110, 99, -2)) - [110, 108, 106, 104, 102, 100] - - """ - s = slice(*args) - start = s.start - stop = s.stop - if s.step == 0: - raise ValueError('step argument must be a non-zero integer or None.') - step = s.step or 1 - - it = iter(iterable) - - if step > 0: - start = 0 if (start is None) else start - - if (start < 0): - # Consume all but the last -start items - cache = deque(enumerate(it, 1), maxlen=-start) - len_iter = cache[-1][0] if cache else 0 - - # Adjust start to be positive - i = max(len_iter + start, 0) - - # Adjust stop to be positive - if stop is None: - j = len_iter - elif stop >= 0: - j = min(stop, len_iter) - else: - j = max(len_iter + stop, 0) - - # Slice the cache - n = j - i - if n <= 0: - return - - for index, item in islice(cache, 0, n, step): - yield item - elif (stop is not None) and (stop < 0): - # Advance to the start position - next(islice(it, start, start), None) - - # When stop is negative, we have to carry -stop items while - # iterating - cache = deque(islice(it, -stop), maxlen=-stop) - - for index, item in enumerate(it): - cached_item = cache.popleft() - if index % step == 0: - yield cached_item - cache.append(item) - else: - # When both start and stop are positive we have the normal case - yield from islice(it, start, stop, step) - else: - start = -1 if (start is None) else start - - if (stop is not None) and (stop < 0): - # Consume all but the last items - n = -stop - 1 - cache = deque(enumerate(it, 1), maxlen=n) - len_iter = cache[-1][0] if cache else 0 - - # If start and stop are both negative they are comparable and - # we can just slice. Otherwise we can adjust start to be negative - # and then slice. - if start < 0: - i, j = start, stop - else: - i, j = min(start - len_iter, -1), None - - for index, item in list(cache)[i:j:step]: - yield item - else: - # Advance to the stop position - if stop is not None: - m = stop + 1 - next(islice(it, m, m), None) - - # stop is positive, so if start is negative they are not comparable - # and we need the rest of the items. - if start < 0: - i = start - n = None - # stop is None and start is positive, so we just need items up to - # the start index. - elif stop is None: - i = None - n = start + 1 - # Both stop and start are positive, so they are comparable. - else: - i = None - n = start - stop - if n <= 0: - return - - cache = list(islice(it, n)) - - yield from cache[i::step] - - -def always_reversible(iterable): - """An extension of :func:`reversed` that supports all iterables, not - just those which implement the ``Reversible`` or ``Sequence`` protocols. - - >>> print(*always_reversible(x for x in range(3))) - 2 1 0 - - If the iterable is already reversible, this function returns the - result of :func:`reversed()`. If the iterable is not reversible, - this function will cache the remaining items in the iterable and - yield them in reverse order, which may require significant storage. - """ - try: - return reversed(iterable) - except TypeError: - return reversed(list(iterable)) - - -def consecutive_groups(iterable, ordering=lambda x: x): - """Yield groups of consecutive items using :func:`itertools.groupby`. - The *ordering* function determines whether two items are adjacent by - returning their position. - - By default, the ordering function is the identity function. 
This is - suitable for finding runs of numbers: - - >>> iterable = [1, 10, 11, 12, 20, 30, 31, 32, 33, 40] - >>> for group in consecutive_groups(iterable): - ... print(list(group)) - [1] - [10, 11, 12] - [20] - [30, 31, 32, 33] - [40] - - For finding runs of adjacent letters, try using the :meth:`index` method - of a string of letters: - - >>> from string import ascii_lowercase - >>> iterable = 'abcdfgilmnop' - >>> ordering = ascii_lowercase.index - >>> for group in consecutive_groups(iterable, ordering): - ... print(list(group)) - ['a', 'b', 'c', 'd'] - ['f', 'g'] - ['i'] - ['l', 'm', 'n', 'o', 'p'] - - """ - for k, g in groupby( - enumerate(iterable), key=lambda x: x[0] - ordering(x[1]) - ): - yield map(itemgetter(1), g) - - -def difference(iterable, func=sub, *, initial=None): - """By default, compute the first difference of *iterable* using - :func:`operator.sub`. - - >>> iterable = [0, 1, 3, 6, 10] - >>> list(difference(iterable)) - [0, 1, 2, 3, 4] - - This is the opposite of :func:`itertools.accumulate`'s default behavior: - - >>> from itertools import accumulate - >>> iterable = [0, 1, 2, 3, 4] - >>> list(accumulate(iterable)) - [0, 1, 3, 6, 10] - >>> list(difference(accumulate(iterable))) - [0, 1, 2, 3, 4] - - By default *func* is :func:`operator.sub`, but other functions can be - specified. They will be applied as follows:: - - A, B, C, D, ... --> A, func(B, A), func(C, B), func(D, C), ... - - For example, to do progressive division: - - >>> iterable = [1, 2, 6, 24, 120] # Factorial sequence - >>> func = lambda x, y: x // y - >>> list(difference(iterable, func)) - [1, 2, 3, 4, 5] - - Since Python 3.8, :func:`itertools.accumulate` can be supplied with an - *initial* keyword argument. If :func:`difference` is called with *initial* - set to something other than ``None``, it will skip the first element when - computing successive differences. - - >>> iterable = [100, 101, 103, 106] # accumate([1, 2, 3], initial=100) - >>> list(difference(iterable, initial=100)) - [1, 2, 3] - - """ - a, b = tee(iterable) - try: - first = [next(b)] - except StopIteration: - return iter([]) - - if initial is not None: - first = [] - - return chain(first, map(lambda x: func(x[1], x[0]), zip(a, b))) - - -class SequenceView(Sequence): - """Return a read-only view of the sequence object *target*. - - :class:`SequenceView` objects are analogous to Python's built-in - "dictionary view" types. They provide a dynamic view of a sequence's items, - meaning that when the sequence updates, so does the view. - - >>> seq = ['0', '1', '2'] - >>> view = SequenceView(seq) - >>> view - SequenceView(['0', '1', '2']) - >>> seq.append('3') - >>> view - SequenceView(['0', '1', '2', '3']) - - Sequence views support indexing, slicing, and length queries. They act - like the underlying sequence, except they don't allow assignment: - - >>> view[1] - '1' - >>> view[1:-1] - ['1', '2'] - >>> len(view) - 4 - - Sequence views are useful as an alternative to copying, as they don't - require (much) extra storage. - - """ - def __init__(self, target): - if not isinstance(target, Sequence): - raise TypeError - self._target = target - - def __getitem__(self, index): - return self._target[index] - - def __len__(self): - return len(self._target) - - def __repr__(self): - return '{}({})'.format(self.__class__.__name__, repr(self._target)) - - -class seekable: - """Wrap an iterator to allow for seeking backward and forward. This - progressively caches the items in the source iterable so they can be - re-visited. 
- - Call :meth:`seek` with an index to seek to that position in the source - iterable. - - To "reset" an iterator, seek to ``0``: - - >>> from itertools import count - >>> it = seekable((str(n) for n in count())) - >>> next(it), next(it), next(it) - ('0', '1', '2') - >>> it.seek(0) - >>> next(it), next(it), next(it) - ('0', '1', '2') - >>> next(it) - '3' - - You can also seek forward: - - >>> it = seekable((str(n) for n in range(20))) - >>> it.seek(10) - >>> next(it) - '10' - >>> it.seek(20) # Seeking past the end of the source isn't a problem - >>> list(it) - [] - >>> it.seek(0) # Resetting works even after hitting the end - >>> next(it), next(it), next(it) - ('0', '1', '2') - - The cache grows as the source iterable progresses, so beware of wrapping - very large or infinite iterables. - - You may view the contents of the cache with the :meth:`elements` method. - That returns a :class:`SequenceView`, a view that updates automatically: - - >>> it = seekable((str(n) for n in range(10))) - >>> next(it), next(it), next(it) - ('0', '1', '2') - >>> elements = it.elements() - >>> elements - SequenceView(['0', '1', '2']) - >>> next(it) - '3' - >>> elements - SequenceView(['0', '1', '2', '3']) - - """ - - def __init__(self, iterable): - self._source = iter(iterable) - self._cache = [] - self._index = None - - def __iter__(self): - return self - - def __next__(self): - if self._index is not None: - try: - item = self._cache[self._index] - except IndexError: - self._index = None - else: - self._index += 1 - return item - - item = next(self._source) - self._cache.append(item) - return item - - def elements(self): - return SequenceView(self._cache) - - def seek(self, index): - self._index = index - remainder = index - len(self._cache) - if remainder > 0: - consume(self, remainder) - - -class run_length: - """ - :func:`run_length.encode` compresses an iterable with run-length encoding. - It yields groups of repeated items with the count of how many times they - were repeated: - - >>> uncompressed = 'abbcccdddd' - >>> list(run_length.encode(uncompressed)) - [('a', 1), ('b', 2), ('c', 3), ('d', 4)] - - :func:`run_length.decode` decompresses an iterable that was previously - compressed with run-length encoding. It yields the items of the - decompressed iterable: - - >>> compressed = [('a', 1), ('b', 2), ('c', 3), ('d', 4)] - >>> list(run_length.decode(compressed)) - ['a', 'b', 'b', 'c', 'c', 'c', 'd', 'd', 'd', 'd'] - - """ - - @staticmethod - def encode(iterable): - return ((k, ilen(g)) for k, g in groupby(iterable)) - - @staticmethod - def decode(iterable): - return chain.from_iterable(repeat(k, n) for k, n in iterable) - - -def exactly_n(iterable, n, predicate=bool): - """Return ``True`` if exactly ``n`` items in the iterable are ``True`` - according to the *predicate* function. - - >>> exactly_n([True, True, False], 2) - True - >>> exactly_n([True, True, False], 1) - False - >>> exactly_n([0, 1, 2, 3, 4, 5], 3, lambda x: x < 3) - True - - The iterable will be advanced until ``n + 1`` truthy items are encountered, - so avoid calling it on infinite iterables. - - """ - return len(take(n + 1, filter(predicate, iterable))) == n - - -def circular_shifts(iterable): - """Return a list of circular shifts of *iterable*. 
- - >>> circular_shifts(range(4)) - [(0, 1, 2, 3), (1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)] - """ - lst = list(iterable) - return take(len(lst), windowed(cycle(lst), len(lst))) - - -def make_decorator(wrapping_func, result_index=0): - """Return a decorator version of *wrapping_func*, which is a function that - modifies an iterable. *result_index* is the position in that function's - signature where the iterable goes. - - This lets you use itertools on the "production end," i.e. at function - definition. This can augment what the function returns without changing the - function's code. - - For example, to produce a decorator version of :func:`chunked`: - - >>> from more_itertools import chunked - >>> chunker = make_decorator(chunked, result_index=0) - >>> @chunker(3) - ... def iter_range(n): - ... return iter(range(n)) - ... - >>> list(iter_range(9)) - [[0, 1, 2], [3, 4, 5], [6, 7, 8]] - - To only allow truthy items to be returned: - - >>> truth_serum = make_decorator(filter, result_index=1) - >>> @truth_serum(bool) - ... def boolean_test(): - ... return [0, 1, '', ' ', False, True] - ... - >>> list(boolean_test()) - [1, ' ', True] - - The :func:`peekable` and :func:`seekable` wrappers make for practical - decorators: - - >>> from more_itertools import peekable - >>> peekable_function = make_decorator(peekable) - >>> @peekable_function() - ... def str_range(*args): - ... return (str(x) for x in range(*args)) - ... - >>> it = str_range(1, 20, 2) - >>> next(it), next(it), next(it) - ('1', '3', '5') - >>> it.peek() - '7' - >>> next(it) - '7' - - """ - # See https://sites.google.com/site/bbayles/index/decorator_factory for - # notes on how this works. - def decorator(*wrapping_args, **wrapping_kwargs): - def outer_wrapper(f): - def inner_wrapper(*args, **kwargs): - result = f(*args, **kwargs) - wrapping_args_ = list(wrapping_args) - wrapping_args_.insert(result_index, result) - return wrapping_func(*wrapping_args_, **wrapping_kwargs) - - return inner_wrapper - - return outer_wrapper - - return decorator - - -def map_reduce(iterable, keyfunc, valuefunc=None, reducefunc=None): - """Return a dictionary that maps the items in *iterable* to categories - defined by *keyfunc*, transforms them with *valuefunc*, and - then summarizes them by category with *reducefunc*. - - *valuefunc* defaults to the identity function if it is unspecified. 
- If *reducefunc* is unspecified, no summarization takes place: - - >>> keyfunc = lambda x: x.upper() - >>> result = map_reduce('abbccc', keyfunc) - >>> sorted(result.items()) - [('A', ['a']), ('B', ['b', 'b']), ('C', ['c', 'c', 'c'])] - - Specifying *valuefunc* transforms the categorized items: - - >>> keyfunc = lambda x: x.upper() - >>> valuefunc = lambda x: 1 - >>> result = map_reduce('abbccc', keyfunc, valuefunc) - >>> sorted(result.items()) - [('A', [1]), ('B', [1, 1]), ('C', [1, 1, 1])] - - Specifying *reducefunc* summarizes the categorized items: - - >>> keyfunc = lambda x: x.upper() - >>> valuefunc = lambda x: 1 - >>> reducefunc = sum - >>> result = map_reduce('abbccc', keyfunc, valuefunc, reducefunc) - >>> sorted(result.items()) - [('A', 1), ('B', 2), ('C', 3)] - - You may want to filter the input iterable before applying the map/reduce - procedure: - - >>> all_items = range(30) - >>> items = [x for x in all_items if 10 <= x <= 20] # Filter - >>> keyfunc = lambda x: x % 2 # Evens map to 0; odds to 1 - >>> categories = map_reduce(items, keyfunc=keyfunc) - >>> sorted(categories.items()) - [(0, [10, 12, 14, 16, 18, 20]), (1, [11, 13, 15, 17, 19])] - >>> summaries = map_reduce(items, keyfunc=keyfunc, reducefunc=sum) - >>> sorted(summaries.items()) - [(0, 90), (1, 75)] - - Note that all items in the iterable are gathered into a list before the - summarization step, which may require significant storage. - - The returned object is a :obj:`collections.defaultdict` with the - ``default_factory`` set to ``None``, such that it behaves like a normal - dictionary. - - """ - valuefunc = (lambda x: x) if (valuefunc is None) else valuefunc - - ret = defaultdict(list) - for item in iterable: - key = keyfunc(item) - value = valuefunc(item) - ret[key].append(value) - - if reducefunc is not None: - for key, value_list in ret.items(): - ret[key] = reducefunc(value_list) - - ret.default_factory = None - return ret - - -def rlocate(iterable, pred=bool, window_size=None): - """Yield the index of each item in *iterable* for which *pred* returns - ``True``, starting from the right and moving left. - - *pred* defaults to :func:`bool`, which will select truthy items: - - >>> list(rlocate([0, 1, 1, 0, 1, 0, 0])) # Truthy at 1, 2, and 4 - [4, 2, 1] - - Set *pred* to a custom function to, e.g., find the indexes for a particular - item: - - >>> iterable = iter('abcb') - >>> pred = lambda x: x == 'b' - >>> list(rlocate(iterable, pred)) - [3, 1] - - If *window_size* is given, then the *pred* function will be called with - that many items. This enables searching for sub-sequences: - - >>> iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3] - >>> pred = lambda *args: args == (1, 2, 3) - >>> list(rlocate(iterable, pred=pred, window_size=3)) - [9, 5, 1] - - Beware, this function won't return anything for infinite iterables. - If *iterable* is reversible, ``rlocate`` will reverse it and search from - the right. Otherwise, it will search from the left and return the results - in reverse order. - - See :func:`locate` to for other example applications. - - """ - if window_size is None: - try: - len_iter = len(iterable) - return ( - len_iter - i - 1 for i in locate(reversed(iterable), pred) - ) - except TypeError: - pass - - return reversed(list(locate(iterable, pred, window_size))) - - -def replace(iterable, pred, substitutes, count=None, window_size=1): - """Yield the items from *iterable*, replacing the items for which *pred* - returns ``True`` with the items from the iterable *substitutes*. 
- - >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1] - >>> pred = lambda x: x == 0 - >>> substitutes = (2, 3) - >>> list(replace(iterable, pred, substitutes)) - [1, 1, 2, 3, 1, 1, 2, 3, 1, 1] - - If *count* is given, the number of replacements will be limited: - - >>> iterable = [1, 1, 0, 1, 1, 0, 1, 1, 0] - >>> pred = lambda x: x == 0 - >>> substitutes = [None] - >>> list(replace(iterable, pred, substitutes, count=2)) - [1, 1, None, 1, 1, None, 1, 1, 0] - - Use *window_size* to control the number of items passed as arguments to - *pred*. This allows for locating and replacing subsequences. - - >>> iterable = [0, 1, 2, 5, 0, 1, 2, 5] - >>> window_size = 3 - >>> pred = lambda *args: args == (0, 1, 2) # 3 items passed to pred - >>> substitutes = [3, 4] # Splice in these items - >>> list(replace(iterable, pred, substitutes, window_size=window_size)) - [3, 4, 5, 3, 4, 5] - - """ - if window_size < 1: - raise ValueError('window_size must be at least 1') - - # Save the substitutes iterable, since it's used more than once - substitutes = tuple(substitutes) - - # Add padding such that the number of windows matches the length of the - # iterable - it = chain(iterable, [_marker] * (window_size - 1)) - windows = windowed(it, window_size) - - n = 0 - for w in windows: - # If the current window matches our predicate (and we haven't hit - # our maximum number of replacements), splice in the substitutes - # and then consume the following windows that overlap with this one. - # For example, if the iterable is (0, 1, 2, 3, 4...) - # and the window size is 2, we have (0, 1), (1, 2), (2, 3)... - # If the predicate matches on (0, 1), we need to zap (0, 1) and (1, 2) - if pred(*w): - if (count is None) or (n < count): - n += 1 - yield from substitutes - consume(windows, window_size - 1) - continue - - # If there was no match (or we've reached the replacement limit), - # yield the first item from the window. - if w and (w[0] is not _marker): - yield w[0] - - -def partitions(iterable): - """Yield all possible order-perserving partitions of *iterable*. - - >>> iterable = 'abc' - >>> for part in partitions(iterable): - ... print([''.join(p) for p in part]) - ['abc'] - ['a', 'bc'] - ['ab', 'c'] - ['a', 'b', 'c'] - - This is unrelated to :func:`partition`. - - """ - sequence = list(iterable) - n = len(sequence) - for i in powerset(range(1, n)): - yield [sequence[i:j] for i, j in zip((0,) + i, i + (n,))] - - -def set_partitions(iterable, k=None): - """ - Yield the set partitions of *iterable* into *k* parts. Set partitions are - not order-preserving. - - >>> iterable = 'abc' - >>> for part in set_partitions(iterable, 2): - ... print([''.join(p) for p in part]) - ['a', 'bc'] - ['b', 'ac'] - ['c', 'ab'] - - - If *k* is not given, every set partition is generated. - - >>> iterable = 'abc' - >>> for part in set_partitions(iterable): - ... 
print([''.join(p) for p in part]) - ['abc'] - ['a', 'bc'] - ['b', 'ac'] - ['c', 'ab'] - ['a', 'b', 'c'] - - """ - iterable = tuple(iterable) - n = len(iterable) - - def less(a, b): - """Orders index tuples lexically""" - return (a < b) if (len(a) == len(b)) else (len(a) < len(b)) - - def part_inds(inds, k, prev): - """Generates set partitions by index""" - if (k <= 1) and (less(prev, inds)): - yield (inds,) - else: - for curr_part_size in range(1, len(inds)): - for curr_part in combinations(inds, curr_part_size): - nxt_part = tuple(i for i in inds if i not in curr_part) - if less(prev, curr_part): - for nxt_parts in part_inds(nxt_part, k - 1, curr_part): - yield (curr_part,) + nxt_parts - - def apply_selection(k): - """Creates partitions of iterable using index partitions""" - nonlocal iterable - nonlocal n - - for ind_parts in part_inds(range(n), k, ()): - yield tuple(tuple(iterable[i] for i in inds) for inds in ind_parts) - - if k is None: - for k in range(1, n + 1): - yield from apply_selection(k) - else: - yield from apply_selection(k) - - -def time_limited(limit_seconds, iterable): - """ - Yield items from *iterable* until *limit_seconds* have passed. - - >>> from time import sleep - >>> def generator(): - ... yield 1 - ... yield 2 - ... sleep(0.2) - ... yield 3 - >>> iterable = generator() - >>> list(time_limited(0.1, iterable)) - [1, 2] - - Note that the time is checked before each item is yielded, and iteration - stops if the time elapsed is greater than *limit_seconds*. If your time - limit is 1 second, but it takes 2 seconds to generate the first item from - the iterable, the function will run for 2 seconds and not yield anything. - - """ - if limit_seconds < 0: - raise ValueError('limit_seconds must be positive') - - start_time = monotonic() - for item in iterable: - if monotonic() - start_time > limit_seconds: - break - yield item - - -def only(iterable, default=None, too_long=None): - """If *iterable* has only one item, return it. - If it has zero items, return *default*. - If it has more than one item, raise the exception given by *too_long*, - which is ``ValueError`` by default. - - >>> only([], default='missing') - 'missing' - >>> only([1]) - 1 - >>> only([1, 2]) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - ValueError: too many items in iterable (expected 1)' - >>> only([1, 2], too_long=TypeError) # doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ... - TypeError - - Note that :func:`only` attempts to advance *iterable* twice to ensure there - is only one item. See :func:`spy` or :func:`peekable` to check - iterable contents less destructively. - """ - it = iter(iterable) - value = next(it, default) - - try: - next(it) - except StopIteration: - pass - else: - raise too_long or ValueError('too many items in iterable (expected 1)') - - return value - - -def ichunked(iterable, n): - """Break *iterable* into sub-iterables with *n* elements each. - :func:`ichunked` is like :func:`chunked`, but it yields iterables - instead of lists. - - If the sub-iterables are read in order, the elements of *iterable* - won't be stored in memory. - If they are read out of order, :func:`itertools.tee` is used to cache - elements as necessary. 
- - >>> from itertools import count - >>> all_chunks = ichunked(count(), 4) - >>> c_1, c_2, c_3 = next(all_chunks), next(all_chunks), next(all_chunks) - >>> list(c_2) # c_1's elements have been cached; c_3's haven't been - [4, 5, 6, 7] - >>> list(c_1) - [0, 1, 2, 3] - >>> list(c_3) - [8, 9, 10, 11] - - """ - source = iter(iterable) - - while True: - # Check to see whether we're at the end of the source iterable - item = next(source, _marker) - if item is _marker: - return - - # Clone the source and yield an n-length slice - source, it = tee(chain([item], source)) - yield islice(it, n) - - # Advance the source iterable - consume(source, n) - - -def distinct_combinations(iterable, r): - """Yield the distinct combinations of *r* items taken from *iterable*. - - >>> list(distinct_combinations([0, 0, 1], 2)) - [(0, 0), (0, 1)] - - Equivalent to ``set(combinations(iterable))``, except duplicates are not - generated and thrown away. For larger input sequences this is much more - efficient. - - """ - if r < 0: - raise ValueError('r must be non-negative') - elif r == 0: - yield () - else: - pool = tuple(iterable) - for i, prefix in unique_everseen(enumerate(pool), key=itemgetter(1)): - for suffix in distinct_combinations(pool[i + 1:], r - 1): - yield (prefix,) + suffix - - -def filter_except(validator, iterable, *exceptions): - """Yield the items from *iterable* for which the *validator* function does - not raise one of the specified *exceptions*. - - *validator* is called for each item in *iterable*. - It should be a function that accepts one argument and raises an exception - if that item is not valid. - - >>> iterable = ['1', '2', 'three', '4', None] - >>> list(filter_except(int, iterable, ValueError, TypeError)) - ['1', '2', '4'] - - If an exception other than one given by *exceptions* is raised by - *validator*, it is raised like normal. - """ - exceptions = tuple(exceptions) - for item in iterable: - try: - validator(item) - except exceptions: - pass - else: - yield item - - -def map_except(function, iterable, *exceptions): - """Transform each item from *iterable* with *function* and yield the - result, unless *function* raises one of the specified *exceptions*. - - *function* is called to transform each item in *iterable*. - It should be a accept one argument. - - >>> iterable = ['1', '2', 'three', '4', None] - >>> list(map_except(int, iterable, ValueError, TypeError)) - [1, 2, 4] - - If an exception other than one given by *exceptions* is raised by - *function*, it is raised like normal. - """ - exceptions = tuple(exceptions) - for item in iterable: - try: - yield function(item) - except exceptions: - pass diff --git a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools/recipes.py b/.tox/py37-normal/lib/python3.7/site-packages/more_itertools/recipes.py deleted file mode 100644 index 4c9f03b..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools/recipes.py +++ /dev/null @@ -1,571 +0,0 @@ -"""Imported from the recipes section of the itertools documentation. - -All functions taken from the recipes section of the itertools library docs -[1]_. -Some backward-compatible usability improvements have been made. - -.. 
[1] http://docs.python.org/library/itertools.html#recipes - -""" -import warnings -from collections import deque -from itertools import ( - chain, - combinations, - count, - cycle, - filterfalse, - groupby, - islice, - repeat, - starmap, - tee, - zip_longest, -) -import operator -from random import randrange, sample, choice - -__all__ = [ - 'all_equal', - 'consume', - 'dotproduct', - 'first_true', - 'flatten', - 'grouper', - 'iter_except', - 'ncycles', - 'nth', - 'nth_combination', - 'padnone', - 'pairwise', - 'partition', - 'powerset', - 'prepend', - 'quantify', - 'random_combination_with_replacement', - 'random_combination', - 'random_permutation', - 'random_product', - 'repeatfunc', - 'roundrobin', - 'tabulate', - 'tail', - 'take', - 'unique_everseen', - 'unique_justseen', -] - - -def take(n, iterable): - """Return first *n* items of the iterable as a list. - - >>> take(3, range(10)) - [0, 1, 2] - >>> take(5, range(3)) - [0, 1, 2] - - Effectively a short replacement for ``next`` based iterator consumption - when you want more than one item, but less than the whole iterator. - - """ - return list(islice(iterable, n)) - - -def tabulate(function, start=0): - """Return an iterator over the results of ``func(start)``, - ``func(start + 1)``, ``func(start + 2)``... - - *func* should be a function that accepts one integer argument. - - If *start* is not specified it defaults to 0. It will be incremented each - time the iterator is advanced. - - >>> square = lambda x: x ** 2 - >>> iterator = tabulate(square, -3) - >>> take(4, iterator) - [9, 4, 1, 0] - - """ - return map(function, count(start)) - - -def tail(n, iterable): - """Return an iterator over the last *n* items of *iterable*. - - >>> t = tail(3, 'ABCDEFG') - >>> list(t) - ['E', 'F', 'G'] - - """ - return iter(deque(iterable, maxlen=n)) - - -def consume(iterator, n=None): - """Advance *iterable* by *n* steps. If *n* is ``None``, consume it - entirely. - - Efficiently exhausts an iterator without returning values. Defaults to - consuming the whole iterator, but an optional second argument may be - provided to limit consumption. - - >>> i = (x for x in range(10)) - >>> next(i) - 0 - >>> consume(i, 3) - >>> next(i) - 4 - >>> consume(i) - >>> next(i) - Traceback (most recent call last): - File "", line 1, in - StopIteration - - If the iterator has fewer items remaining than the provided limit, the - whole iterator will be consumed. - - >>> i = (x for x in range(3)) - >>> consume(i, 5) - >>> next(i) - Traceback (most recent call last): - File "", line 1, in - StopIteration - - """ - # Use functions that consume iterators at C speed. - if n is None: - # feed the entire iterator into a zero-length deque - deque(iterator, maxlen=0) - else: - # advance to the empty slice starting at position n - next(islice(iterator, n, n), None) - - -def nth(iterable, n, default=None): - """Returns the nth item or a default value. - - >>> l = range(10) - >>> nth(l, 3) - 3 - >>> nth(l, 20, "zebra") - 'zebra' - - """ - return next(islice(iterable, n, None), default) - - -def all_equal(iterable): - """ - Returns ``True`` if all the elements are equal to each other. - - >>> all_equal('aaaa') - True - >>> all_equal('aaab') - False - - """ - g = groupby(iterable) - return next(g, True) and not next(g, False) - - -def quantify(iterable, pred=bool): - """Return the how many times the predicate is true. 
- - >>> quantify([True, False, True]) - 2 - - """ - return sum(map(pred, iterable)) - - -def padnone(iterable): - """Returns the sequence of elements and then returns ``None`` indefinitely. - - >>> take(5, padnone(range(3))) - [0, 1, 2, None, None] - - Useful for emulating the behavior of the built-in :func:`map` function. - - See also :func:`padded`. - - """ - return chain(iterable, repeat(None)) - - -def ncycles(iterable, n): - """Returns the sequence elements *n* times - - >>> list(ncycles(["a", "b"], 3)) - ['a', 'b', 'a', 'b', 'a', 'b'] - - """ - return chain.from_iterable(repeat(tuple(iterable), n)) - - -def dotproduct(vec1, vec2): - """Returns the dot product of the two iterables. - - >>> dotproduct([10, 10], [20, 20]) - 400 - - """ - return sum(map(operator.mul, vec1, vec2)) - - -def flatten(listOfLists): - """Return an iterator flattening one level of nesting in a list of lists. - - >>> list(flatten([[0, 1], [2, 3]])) - [0, 1, 2, 3] - - See also :func:`collapse`, which can flatten multiple levels of nesting. - - """ - return chain.from_iterable(listOfLists) - - -def repeatfunc(func, times=None, *args): - """Call *func* with *args* repeatedly, returning an iterable over the - results. - - If *times* is specified, the iterable will terminate after that many - repetitions: - - >>> from operator import add - >>> times = 4 - >>> args = 3, 5 - >>> list(repeatfunc(add, times, *args)) - [8, 8, 8, 8] - - If *times* is ``None`` the iterable will not terminate: - - >>> from random import randrange - >>> times = None - >>> args = 1, 11 - >>> take(6, repeatfunc(randrange, times, *args)) # doctest:+SKIP - [2, 4, 8, 1, 8, 4] - - """ - if times is None: - return starmap(func, repeat(args)) - return starmap(func, repeat(args, times)) - - -def pairwise(iterable): - """Returns an iterator of paired items, overlapping, from the original - - >>> take(4, pairwise(count())) - [(0, 1), (1, 2), (2, 3), (3, 4)] - - """ - a, b = tee(iterable) - next(b, None) - return zip(a, b) - - -def grouper(iterable, n, fillvalue=None): - """Collect data into fixed-length chunks or blocks. - - >>> list(grouper('ABCDEFG', 3, 'x')) - [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')] - - """ - if isinstance(iterable, int): - warnings.warn( - "grouper expects iterable as first parameter", - DeprecationWarning, - ) - n, iterable = iterable, n - args = [iter(iterable)] * n - return zip_longest(fillvalue=fillvalue, *args) - - -def roundrobin(*iterables): - """Yields an item from each iterable, alternating between them. - - >>> list(roundrobin('ABC', 'D', 'EF')) - ['A', 'D', 'E', 'B', 'F', 'C'] - - This function produces the same output as :func:`interleave_longest`, but - may perform better for some inputs (in particular when the number of - iterables is small). - - """ - # Recipe credited to George Sakkis - pending = len(iterables) - nexts = cycle(iter(it).__next__ for it in iterables) - while pending: - try: - for next in nexts: - yield next() - except StopIteration: - pending -= 1 - nexts = cycle(islice(nexts, pending)) - - -def partition(pred, iterable): - """ - Returns a 2-tuple of iterables derived from the input iterable. - The first yields the items that have ``pred(item) == False``. - The second yields the items that have ``pred(item) == True``. 
- - >>> is_odd = lambda x: x % 2 != 0 - >>> iterable = range(10) - >>> even_items, odd_items = partition(is_odd, iterable) - >>> list(even_items), list(odd_items) - ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9]) - - """ - # partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9 - t1, t2 = tee(iterable) - return filterfalse(pred, t1), filter(pred, t2) - - -def powerset(iterable): - """Yields all possible subsets of the iterable. - - >>> list(powerset([1, 2, 3])) - [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)] - - :func:`powerset` will operate on iterables that aren't :class:`set` - instances, so repeated elements in the input will produce repeated elements - in the output. Use :func:`unique_everseen` on the input to avoid generating - duplicates: - - >>> seq = [1, 1, 0] - >>> list(powerset(seq)) - [(), (1,), (1,), (0,), (1, 1), (1, 0), (1, 0), (1, 1, 0)] - >>> from more_itertools import unique_everseen - >>> list(powerset(unique_everseen(seq))) - [(), (1,), (0,), (1, 0)] - - """ - s = list(iterable) - return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1)) - - -def unique_everseen(iterable, key=None): - """ - Yield unique elements, preserving order. - - >>> list(unique_everseen('AAAABBBCCDAABBB')) - ['A', 'B', 'C', 'D'] - >>> list(unique_everseen('ABBCcAD', str.lower)) - ['A', 'B', 'C', 'D'] - - Sequences with a mix of hashable and unhashable items can be used. - The function will be slower (i.e., `O(n^2)`) for unhashable items. - - Remember that ``list`` objects are unhashable - you can use the *key* - parameter to transform the list to a tuple (which is hashable) to - avoid a slowdown. - - >>> iterable = ([1, 2], [2, 3], [1, 2]) - >>> list(unique_everseen(iterable)) # Slow - [[1, 2], [2, 3]] - >>> list(unique_everseen(iterable, key=tuple)) # Faster - [[1, 2], [2, 3]] - - Similary, you may want to convert unhashable ``set`` objects with - ``key=frozenset``. For ``dict`` objects, - ``key=lambda x: frozenset(x.items())`` can be used. - - """ - seenset = set() - seenset_add = seenset.add - seenlist = [] - seenlist_add = seenlist.append - if key is None: - for element in iterable: - try: - if element not in seenset: - seenset_add(element) - yield element - except TypeError: - if element not in seenlist: - seenlist_add(element) - yield element - else: - for element in iterable: - k = key(element) - try: - if k not in seenset: - seenset_add(k) - yield element - except TypeError: - if k not in seenlist: - seenlist_add(k) - yield element - - -def unique_justseen(iterable, key=None): - """Yields elements in order, ignoring serial duplicates - - >>> list(unique_justseen('AAAABBBCCDAABBB')) - ['A', 'B', 'C', 'D', 'A', 'B'] - >>> list(unique_justseen('ABBCcAD', str.lower)) - ['A', 'B', 'C', 'A', 'D'] - - """ - return map(next, map(operator.itemgetter(1), groupby(iterable, key))) - - -def iter_except(func, exception, first=None): - """Yields results from a function repeatedly until an exception is raised. - - Converts a call-until-exception interface to an iterator interface. - Like ``iter(func, sentinel)``, but uses an exception instead of a sentinel - to end the loop. - - >>> l = [0, 1, 2] - >>> list(iter_except(l.pop, IndexError)) - [2, 1, 0] - - """ - try: - if first is not None: - yield first() - while 1: - yield func() - except exception: - pass - - -def first_true(iterable, default=None, pred=None): - """ - Returns the first true value in the iterable. 
- - If no true value is found, returns *default* - - If *pred* is not None, returns the first item for which - ``pred(item) == True`` . - - >>> first_true(range(10)) - 1 - >>> first_true(range(10), pred=lambda x: x > 5) - 6 - >>> first_true(range(10), default='missing', pred=lambda x: x > 9) - 'missing' - - """ - return next(filter(pred, iterable), default) - - -def random_product(*args, **kwds): - """Draw an item at random from each of the input iterables. - - >>> random_product('abc', range(4), 'XYZ') # doctest:+SKIP - ('c', 3, 'Z') - - If *repeat* is provided as a keyword argument, that many items will be - drawn from each iterable. - - >>> random_product('abcd', range(4), repeat=2) # doctest:+SKIP - ('a', 2, 'd', 3) - - This equivalent to taking a random selection from - ``itertools.product(*args, **kwarg)``. - - """ - pools = [tuple(pool) for pool in args] * kwds.get('repeat', 1) - return tuple(choice(pool) for pool in pools) - - -def random_permutation(iterable, r=None): - """Return a random *r* length permutation of the elements in *iterable*. - - If *r* is not specified or is ``None``, then *r* defaults to the length of - *iterable*. - - >>> random_permutation(range(5)) # doctest:+SKIP - (3, 4, 0, 1, 2) - - This equivalent to taking a random selection from - ``itertools.permutations(iterable, r)``. - - """ - pool = tuple(iterable) - r = len(pool) if r is None else r - return tuple(sample(pool, r)) - - -def random_combination(iterable, r): - """Return a random *r* length subsequence of the elements in *iterable*. - - >>> random_combination(range(5), 3) # doctest:+SKIP - (2, 3, 4) - - This equivalent to taking a random selection from - ``itertools.combinations(iterable, r)``. - - """ - pool = tuple(iterable) - n = len(pool) - indices = sorted(sample(range(n), r)) - return tuple(pool[i] for i in indices) - - -def random_combination_with_replacement(iterable, r): - """Return a random *r* length subsequence of elements in *iterable*, - allowing individual elements to be repeated. - - >>> random_combination_with_replacement(range(3), 5) # doctest:+SKIP - (0, 0, 1, 2, 2) - - This equivalent to taking a random selection from - ``itertools.combinations_with_replacement(iterable, r)``. - - """ - pool = tuple(iterable) - n = len(pool) - indices = sorted(randrange(n) for i in range(r)) - return tuple(pool[i] for i in indices) - - -def nth_combination(iterable, r, index): - """Equivalent to ``list(combinations(iterable, r))[index]``. - - The subsequences of *iterable* that are of length *r* can be ordered - lexicographically. :func:`nth_combination` computes the subsequence at - sort position *index* directly, without computing the previous - subsequences. - - """ - pool = tuple(iterable) - n = len(pool) - if (r < 0) or (r > n): - raise ValueError - - c = 1 - k = min(r, n - r) - for i in range(1, k + 1): - c = c * (n - k + i) // i - - if index < 0: - index += c - - if (index < 0) or (index >= c): - raise IndexError - - result = [] - while r: - c, n, r = c * r // n, n - 1, r - 1 - while index >= c: - index -= c - c, n = c * (n - r) // n, n - 1 - result.append(pool[-1 - n]) - - return tuple(result) - - -def prepend(value, iterator): - """Yield *value*, followed by the elements in *iterator*. - - >>> value = '0' - >>> iterator = ['1', '2', '3'] - >>> list(prepend(value, iterator)) - ['0', '1', '2', '3'] - - To prepend multiple values, see :func:`itertools.chain`. 
- - """ - return chain([value], iterator) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools/tests/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/more_itertools/tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools/tests/test_more.py b/.tox/py37-normal/lib/python3.7/site-packages/more_itertools/tests/test_more.py deleted file mode 100644 index c4211d0..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools/tests/test_more.py +++ /dev/null @@ -1,2699 +0,0 @@ -from collections import OrderedDict -from datetime import datetime, timedelta -from decimal import Decimal -from doctest import DocTestSuite -from fractions import Fraction -from functools import partial, reduce -from heapq import merge -from io import StringIO -from itertools import ( - accumulate, - chain, - combinations, - count, - groupby, - islice, - permutations, - product, - repeat, -) -from operator import add, mul, itemgetter, or_ -from sys import version_info -from time import sleep -from unittest import skipIf, TestCase - -import more_itertools as mi - - -def load_tests(loader, tests, ignore): - # Add the doctests - tests.addTests(DocTestSuite('more_itertools.more')) - return tests - - -class CollateTests(TestCase): - """Unit tests for ``collate()``""" - # Also accidentally tests peekable, though that could use its own tests - - def test_default(self): - """Test with the default `key` function.""" - iterables = [range(4), range(7), range(3, 6)] - self.assertEqual( - sorted(reduce(list.__add__, [list(it) for it in iterables])), - list(mi.collate(*iterables)) - ) - - def test_key(self): - """Test using a custom `key` function.""" - iterables = [range(5, 0, -1), range(4, 0, -1)] - actual = sorted( - reduce(list.__add__, [list(it) for it in iterables]), reverse=True - ) - expected = list(mi.collate(*iterables, key=lambda x: -x)) - self.assertEqual(actual, expected) - - def test_empty(self): - """Be nice if passed an empty list of iterables.""" - self.assertEqual([], list(mi.collate())) - - def test_one(self): - """Work when only 1 iterable is passed.""" - self.assertEqual([0, 1], list(mi.collate(range(2)))) - - def test_reverse(self): - """Test the `reverse` kwarg.""" - iterables = [range(4, 0, -1), range(7, 0, -1), range(3, 6, -1)] - - actual = sorted( - reduce(list.__add__, [list(it) for it in iterables]), reverse=True - ) - expected = list(mi.collate(*iterables, reverse=True)) - self.assertEqual(actual, expected) - - def test_alias(self): - self.assertNotEqual(merge.__doc__, mi.collate.__doc__) - self.assertNotEqual(partial.__doc__, mi.collate.__doc__) - - -class ChunkedTests(TestCase): - """Tests for ``chunked()``""" - - def test_even(self): - """Test when ``n`` divides evenly into the length of the iterable.""" - self.assertEqual( - list(mi.chunked('ABCDEF', 3)), [['A', 'B', 'C'], ['D', 'E', 'F']] - ) - - def test_odd(self): - """Test when ``n`` does not divide evenly into the length of the - iterable. - - """ - self.assertEqual( - list(mi.chunked('ABCDE', 3)), [['A', 'B', 'C'], ['D', 'E']] - ) - - -class FirstTests(TestCase): - """Tests for ``first()``""" - - def test_many(self): - """Test that it works on many-item iterables.""" - # Also try it on a generator expression to make sure it works on - # whatever those return, across Python versions. 
- self.assertEqual(mi.first(x for x in range(4)), 0) - - def test_one(self): - """Test that it doesn't raise StopIteration prematurely.""" - self.assertEqual(mi.first([3]), 3) - - def test_empty_stop_iteration(self): - """It should raise StopIteration for empty iterables.""" - self.assertRaises(ValueError, lambda: mi.first([])) - - def test_default(self): - """It should return the provided default arg for empty iterables.""" - self.assertEqual(mi.first([], 'boo'), 'boo') - - -class IterOnlyRange: - """User-defined iterable class which only support __iter__. - - >>> r = IterOnlyRange(5) - >>> r[0] - AttributeError: IterOnlyRange instance has no attribute '__getitem__' - - Note: In Python 3, ``TypeError`` will be raised because ``object`` is - inherited implicitly by default. - - >>> r[0] - TypeError: 'IterOnlyRange' object does not support indexing - """ - def __init__(self, n): - """Set the length of the range.""" - self.n = n - - def __iter__(self): - """Works same as range().""" - return iter(range(self.n)) - - -class LastTests(TestCase): - """Tests for ``last()``""" - - def test_many_nonsliceable(self): - """Test that it works on many-item non-slice-able iterables.""" - # Also try it on a generator expression to make sure it works on - # whatever those return, across Python versions. - self.assertEqual(mi.last(x for x in range(4)), 3) - - def test_one_nonsliceable(self): - """Test that it doesn't raise StopIteration prematurely.""" - self.assertEqual(mi.last(x for x in range(1)), 0) - - def test_empty_stop_iteration_nonsliceable(self): - """It should raise ValueError for empty non-slice-able iterables.""" - self.assertRaises(ValueError, lambda: mi.last(x for x in range(0))) - - def test_default_nonsliceable(self): - """It should return the provided default arg for empty non-slice-able - iterables. - """ - self.assertEqual(mi.last((x for x in range(0)), 'boo'), 'boo') - - def test_many_sliceable(self): - """Test that it works on many-item slice-able iterables.""" - self.assertEqual(mi.last([0, 1, 2, 3]), 3) - - def test_one_sliceable(self): - """Test that it doesn't raise StopIteration prematurely.""" - self.assertEqual(mi.last([3]), 3) - - def test_empty_stop_iteration_sliceable(self): - """It should raise ValueError for empty slice-able iterables.""" - self.assertRaises(ValueError, lambda: mi.last([])) - - def test_default_sliceable(self): - """It should return the provided default arg for empty slice-able - iterables. - """ - self.assertEqual(mi.last([], 'boo'), 'boo') - - def test_dict(self): - """last(dic) and last(dic.keys()) should return same result.""" - dic = {'a': 1, 'b': 2, 'c': 3} - self.assertEqual(mi.last(dic), mi.last(dic.keys())) - - def test_ordereddict(self): - """last(dic) should return the last key.""" - od = OrderedDict() - od['a'] = 1 - od['b'] = 2 - od['c'] = 3 - self.assertEqual(mi.last(od), 'c') - - def test_customrange(self): - """It should work on custom class where [] raises AttributeError.""" - self.assertEqual(mi.last(IterOnlyRange(5)), 4) - - -class PeekableTests(TestCase): - """Tests for ``peekable()`` behavor not incidentally covered by testing - ``collate()`` - - """ - def test_peek_default(self): - """Make sure passing a default into ``peek()`` works.""" - p = mi.peekable([]) - self.assertEqual(p.peek(7), 7) - - def test_truthiness(self): - """Make sure a ``peekable`` tests true iff there are items remaining in - the iterable. 
- - """ - p = mi.peekable([]) - self.assertFalse(p) - - p = mi.peekable(range(3)) - self.assertTrue(p) - - def test_simple_peeking(self): - """Make sure ``next`` and ``peek`` advance and don't advance the - iterator, respectively. - - """ - p = mi.peekable(range(10)) - self.assertEqual(next(p), 0) - self.assertEqual(p.peek(), 1) - self.assertEqual(next(p), 1) - - def test_indexing(self): - """ - Indexing into the peekable shouldn't advance the iterator. - """ - p = mi.peekable('abcdefghijkl') - - # The 0th index is what ``next()`` will return - self.assertEqual(p[0], 'a') - self.assertEqual(next(p), 'a') - - # Indexing further into the peekable shouldn't advance the itertor - self.assertEqual(p[2], 'd') - self.assertEqual(next(p), 'b') - - # The 0th index moves up with the iterator; the last index follows - self.assertEqual(p[0], 'c') - self.assertEqual(p[9], 'l') - - self.assertEqual(next(p), 'c') - self.assertEqual(p[8], 'l') - - # Negative indexing should work too - self.assertEqual(p[-2], 'k') - self.assertEqual(p[-9], 'd') - self.assertRaises(IndexError, lambda: p[-10]) - - def test_slicing(self): - """Slicing the peekable shouldn't advance the iterator.""" - seq = list('abcdefghijkl') - p = mi.peekable(seq) - - # Slicing the peekable should just be like slicing a re-iterable - self.assertEqual(p[1:4], seq[1:4]) - - # Advancing the iterator moves the slices up also - self.assertEqual(next(p), 'a') - self.assertEqual(p[1:4], seq[1:][1:4]) - - # Implicit starts and stop should work - self.assertEqual(p[:5], seq[1:][:5]) - self.assertEqual(p[:], seq[1:][:]) - - # Indexing past the end should work - self.assertEqual(p[:100], seq[1:][:100]) - - # Steps should work, including negative - self.assertEqual(p[::2], seq[1:][::2]) - self.assertEqual(p[::-1], seq[1:][::-1]) - - def test_slicing_reset(self): - """Test slicing on a fresh iterable each time""" - iterable = ['0', '1', '2', '3', '4', '5'] - indexes = list(range(-4, len(iterable) + 4)) + [None] - steps = [1, 2, 3, 4, -1, -2, -3, 4] - for slice_args in product(indexes, indexes, steps): - it = iter(iterable) - p = mi.peekable(it) - next(p) - index = slice(*slice_args) - actual = p[index] - expected = iterable[1:][index] - self.assertEqual(actual, expected, slice_args) - - def test_slicing_error(self): - iterable = '01234567' - p = mi.peekable(iter(iterable)) - - # Prime the cache - p.peek() - old_cache = list(p._cache) - - # Illegal slice - with self.assertRaises(ValueError): - p[1:-1:0] - - # Neither the cache nor the iteration should be affected - self.assertEqual(old_cache, list(p._cache)) - self.assertEqual(list(p), list(iterable)) - - def test_passthrough(self): - """Iterating a peekable without using ``peek()`` or ``prepend()`` - should just give the underlying iterable's elements (a trivial test but - useful to set a baseline in case something goes wrong)""" - expected = [1, 2, 3, 4, 5] - actual = list(mi.peekable(expected)) - self.assertEqual(actual, expected) - - # prepend() behavior tests - - def test_prepend(self): - """Tests intersperesed ``prepend()`` and ``next()`` calls""" - it = mi.peekable(range(2)) - actual = [] - - # Test prepend() before next() - it.prepend(10) - actual += [next(it), next(it)] - - # Test prepend() between next()s - it.prepend(11) - actual += [next(it), next(it)] - - # Test prepend() after source iterable is consumed - it.prepend(12) - actual += [next(it)] - - expected = [10, 0, 11, 1, 12] - self.assertEqual(actual, expected) - - def test_multi_prepend(self): - """Tests prepending multiple items and 
getting them in proper order""" - it = mi.peekable(range(5)) - actual = [next(it), next(it)] - it.prepend(10, 11, 12) - it.prepend(20, 21) - actual += list(it) - expected = [0, 1, 20, 21, 10, 11, 12, 2, 3, 4] - self.assertEqual(actual, expected) - - def test_empty(self): - """Tests prepending in front of an empty iterable""" - it = mi.peekable([]) - it.prepend(10) - actual = list(it) - expected = [10] - self.assertEqual(actual, expected) - - def test_prepend_truthiness(self): - """Tests that ``__bool__()`` or ``__nonzero__()`` works properly - with ``prepend()``""" - it = mi.peekable(range(5)) - self.assertTrue(it) - actual = list(it) - self.assertFalse(it) - it.prepend(10) - self.assertTrue(it) - actual += [next(it)] - self.assertFalse(it) - expected = [0, 1, 2, 3, 4, 10] - self.assertEqual(actual, expected) - - def test_multi_prepend_peek(self): - """Tests prepending multiple elements and getting them in reverse order - while peeking""" - it = mi.peekable(range(5)) - actual = [next(it), next(it)] - self.assertEqual(it.peek(), 2) - it.prepend(10, 11, 12) - self.assertEqual(it.peek(), 10) - it.prepend(20, 21) - self.assertEqual(it.peek(), 20) - actual += list(it) - self.assertFalse(it) - expected = [0, 1, 20, 21, 10, 11, 12, 2, 3, 4] - self.assertEqual(actual, expected) - - def test_prepend_after_stop(self): - """Test resuming iteration after a previous exhaustion""" - it = mi.peekable(range(3)) - self.assertEqual(list(it), [0, 1, 2]) - self.assertRaises(StopIteration, lambda: next(it)) - it.prepend(10) - self.assertEqual(next(it), 10) - self.assertRaises(StopIteration, lambda: next(it)) - - def test_prepend_slicing(self): - """Tests interaction between prepending and slicing""" - seq = list(range(20)) - p = mi.peekable(seq) - - p.prepend(30, 40, 50) - pseq = [30, 40, 50] + seq # pseq for prepended_seq - - # adapt the specific tests from test_slicing - self.assertEqual(p[0], 30) - self.assertEqual(p[1:8], pseq[1:8]) - self.assertEqual(p[1:], pseq[1:]) - self.assertEqual(p[:5], pseq[:5]) - self.assertEqual(p[:], pseq[:]) - self.assertEqual(p[:100], pseq[:100]) - self.assertEqual(p[::2], pseq[::2]) - self.assertEqual(p[::-1], pseq[::-1]) - - def test_prepend_indexing(self): - """Tests interaction between prepending and indexing""" - seq = list(range(20)) - p = mi.peekable(seq) - - p.prepend(30, 40, 50) - - self.assertEqual(p[0], 30) - self.assertEqual(next(p), 30) - self.assertEqual(p[2], 0) - self.assertEqual(next(p), 40) - self.assertEqual(p[0], 50) - self.assertEqual(p[9], 8) - self.assertEqual(next(p), 50) - self.assertEqual(p[8], 8) - self.assertEqual(p[-2], 18) - self.assertEqual(p[-9], 11) - self.assertRaises(IndexError, lambda: p[-21]) - - def test_prepend_iterable(self): - """Tests prepending from an iterable""" - it = mi.peekable(range(5)) - # Don't directly use the range() object to avoid any range-specific - # optimizations - it.prepend(*(x for x in range(5))) - actual = list(it) - expected = list(chain(range(5), range(5))) - self.assertEqual(actual, expected) - - def test_prepend_many(self): - """Tests that prepending a huge number of elements works""" - it = mi.peekable(range(5)) - # Don't directly use the range() object to avoid any range-specific - # optimizations - it.prepend(*(x for x in range(20000))) - actual = list(it) - expected = list(chain(range(20000), range(5))) - self.assertEqual(actual, expected) - - def test_prepend_reversed(self): - """Tests prepending from a reversed iterable""" - it = mi.peekable(range(3)) - it.prepend(*reversed((10, 11, 12))) - actual = 
list(it) - expected = [12, 11, 10, 0, 1, 2] - self.assertEqual(actual, expected) - - -class ConsumerTests(TestCase): - """Tests for ``consumer()``""" - - def test_consumer(self): - @mi.consumer - def eater(): - while True: - x = yield # noqa - - e = eater() - e.send('hi') # without @consumer, would raise TypeError - - -class DistinctPermutationsTests(TestCase): - def test_distinct_permutations(self): - """Make sure the output for ``distinct_permutations()`` is the same as - set(permutations(it)). - - """ - iterable = ['z', 'a', 'a', 'q', 'q', 'q', 'y'] - test_output = sorted(mi.distinct_permutations(iterable)) - ref_output = sorted(set(permutations(iterable))) - self.assertEqual(test_output, ref_output) - - def test_other_iterables(self): - """Make sure ``distinct_permutations()`` accepts a different type of - iterables. - - """ - # a generator - iterable = (c for c in ['z', 'a', 'a', 'q', 'q', 'q', 'y']) - test_output = sorted(mi.distinct_permutations(iterable)) - # "reload" it - iterable = (c for c in ['z', 'a', 'a', 'q', 'q', 'q', 'y']) - ref_output = sorted(set(permutations(iterable))) - self.assertEqual(test_output, ref_output) - - # an iterator - iterable = iter(['z', 'a', 'a', 'q', 'q', 'q', 'y']) - test_output = sorted(mi.distinct_permutations(iterable)) - # "reload" it - iterable = iter(['z', 'a', 'a', 'q', 'q', 'q', 'y']) - ref_output = sorted(set(permutations(iterable))) - self.assertEqual(test_output, ref_output) - - -class IlenTests(TestCase): - def test_ilen(self): - """Sanity-checks for ``ilen()``.""" - # Non-empty - self.assertEqual( - mi.ilen(filter(lambda x: x % 10 == 0, range(101))), 11 - ) - - # Empty - self.assertEqual(mi.ilen((x for x in range(0))), 0) - - # Iterable with __len__ - self.assertEqual(mi.ilen(list(range(6))), 6) - - -class WithIterTests(TestCase): - def test_with_iter(self): - s = StringIO('One fish\nTwo fish') - initial_words = [line.split()[0] for line in mi.with_iter(s)] - - # Iterable's items should be faithfully represented - self.assertEqual(initial_words, ['One', 'Two']) - # The file object should be closed - self.assertTrue(s.closed) - - -class OneTests(TestCase): - def test_basic(self): - it = iter(['item']) - self.assertEqual(mi.one(it), 'item') - - def test_too_short(self): - it = iter([]) - self.assertRaises(ValueError, lambda: mi.one(it)) - self.assertRaises(IndexError, lambda: mi.one(it, too_short=IndexError)) - - def test_too_long(self): - it = count() - self.assertRaises(ValueError, lambda: mi.one(it)) # burn 0 and 1 - self.assertEqual(next(it), 2) - self.assertRaises( - OverflowError, lambda: mi.one(it, too_long=OverflowError) - ) - - -class IntersperseTest(TestCase): - """ Tests for intersperse() """ - - def test_even(self): - iterable = (x for x in '01') - self.assertEqual( - list(mi.intersperse(None, iterable)), ['0', None, '1'] - ) - - def test_odd(self): - iterable = (x for x in '012') - self.assertEqual( - list(mi.intersperse(None, iterable)), ['0', None, '1', None, '2'] - ) - - def test_nested(self): - element = ('a', 'b') - iterable = (x for x in '012') - actual = list(mi.intersperse(element, iterable)) - expected = ['0', ('a', 'b'), '1', ('a', 'b'), '2'] - self.assertEqual(actual, expected) - - def test_not_iterable(self): - self.assertRaises(TypeError, lambda: mi.intersperse('x', 1)) - - def test_n(self): - for n, element, expected in [ - (1, '_', ['0', '_', '1', '_', '2', '_', '3', '_', '4', '_', '5']), - (2, '_', ['0', '1', '_', '2', '3', '_', '4', '5']), - (3, '_', ['0', '1', '2', '_', '3', '4', '5']), - (4, '_', ['0', '1', 
'2', '3', '_', '4', '5']), - (5, '_', ['0', '1', '2', '3', '4', '_', '5']), - (6, '_', ['0', '1', '2', '3', '4', '5']), - (7, '_', ['0', '1', '2', '3', '4', '5']), - (3, ['a', 'b'], ['0', '1', '2', ['a', 'b'], '3', '4', '5']), - ]: - iterable = (x for x in '012345') - actual = list(mi.intersperse(element, iterable, n=n)) - self.assertEqual(actual, expected) - - def test_n_zero(self): - self.assertRaises( - ValueError, lambda: list(mi.intersperse('x', '012', n=0)) - ) - - -class UniqueToEachTests(TestCase): - """Tests for ``unique_to_each()``""" - - def test_all_unique(self): - """When all the input iterables are unique the output should match - the input.""" - iterables = [[1, 2], [3, 4, 5], [6, 7, 8]] - self.assertEqual(mi.unique_to_each(*iterables), iterables) - - def test_duplicates(self): - """When there are duplicates in any of the input iterables that aren't - in the rest, those duplicates should be emitted.""" - iterables = ["mississippi", "missouri"] - self.assertEqual( - mi.unique_to_each(*iterables), [['p', 'p'], ['o', 'u', 'r']] - ) - - def test_mixed(self): - """When the input iterables contain different types the function should - still behave properly""" - iterables = ['x', (i for i in range(3)), [1, 2, 3], tuple()] - self.assertEqual(mi.unique_to_each(*iterables), [['x'], [0], [3], []]) - - -class WindowedTests(TestCase): - """Tests for ``windowed()``""" - - def test_basic(self): - actual = list(mi.windowed([1, 2, 3, 4, 5], 3)) - expected = [(1, 2, 3), (2, 3, 4), (3, 4, 5)] - self.assertEqual(actual, expected) - - def test_large_size(self): - """ - When the window size is larger than the iterable, and no fill value is - given,``None`` should be filled in. - """ - actual = list(mi.windowed([1, 2, 3, 4, 5], 6)) - expected = [(1, 2, 3, 4, 5, None)] - self.assertEqual(actual, expected) - - def test_fillvalue(self): - """ - When sizes don't match evenly, the given fill value should be used. 
- """ - iterable = [1, 2, 3, 4, 5] - - for n, kwargs, expected in [ - (6, {}, [(1, 2, 3, 4, 5, '!')]), # n > len(iterable) - (3, {'step': 3}, [(1, 2, 3), (4, 5, '!')]), # using ``step`` - ]: - actual = list(mi.windowed(iterable, n, fillvalue='!', **kwargs)) - self.assertEqual(actual, expected) - - def test_zero(self): - """When the window size is zero, an empty tuple should be emitted.""" - actual = list(mi.windowed([1, 2, 3, 4, 5], 0)) - expected = [tuple()] - self.assertEqual(actual, expected) - - def test_negative(self): - """When the window size is negative, ValueError should be raised.""" - with self.assertRaises(ValueError): - list(mi.windowed([1, 2, 3, 4, 5], -1)) - - def test_step(self): - """The window should advance by the number of steps provided""" - iterable = [1, 2, 3, 4, 5, 6, 7] - for n, step, expected in [ - (3, 2, [(1, 2, 3), (3, 4, 5), (5, 6, 7)]), # n > step - (3, 3, [(1, 2, 3), (4, 5, 6), (7, None, None)]), # n == step - (3, 4, [(1, 2, 3), (5, 6, 7)]), # line up nicely - (3, 5, [(1, 2, 3), (6, 7, None)]), # off by one - (3, 6, [(1, 2, 3), (7, None, None)]), # off by two - (3, 7, [(1, 2, 3)]), # step past the end - (7, 8, [(1, 2, 3, 4, 5, 6, 7)]), # step > len(iterable) - ]: - actual = list(mi.windowed(iterable, n, step=step)) - self.assertEqual(actual, expected) - - # Step must be greater than or equal to 1 - with self.assertRaises(ValueError): - list(mi.windowed(iterable, 3, step=0)) - - -class SubstringsTests(TestCase): - def test_basic(self): - iterable = (x for x in range(4)) - actual = list(mi.substrings(iterable)) - expected = [ - (0,), - (1,), - (2,), - (3,), - (0, 1), - (1, 2), - (2, 3), - (0, 1, 2), - (1, 2, 3), - (0, 1, 2, 3), - ] - self.assertEqual(actual, expected) - - def test_strings(self): - iterable = 'abc' - actual = list(mi.substrings(iterable)) - expected = [ - ('a',), ('b',), ('c',), ('a', 'b'), ('b', 'c'), ('a', 'b', 'c') - ] - self.assertEqual(actual, expected) - - def test_empty(self): - iterable = iter([]) - actual = list(mi.substrings(iterable)) - expected = [] - self.assertEqual(actual, expected) - - def test_order(self): - iterable = [2, 0, 1] - actual = list(mi.substrings(iterable)) - expected = [(2,), (0,), (1,), (2, 0), (0, 1), (2, 0, 1)] - self.assertEqual(actual, expected) - - -class SubstringsIndexesTests(TestCase): - def test_basic(self): - sequence = [x for x in range(4)] - actual = list(mi.substrings_indexes(sequence)) - expected = [ - ([0], 0, 1), - ([1], 1, 2), - ([2], 2, 3), - ([3], 3, 4), - ([0, 1], 0, 2), - ([1, 2], 1, 3), - ([2, 3], 2, 4), - ([0, 1, 2], 0, 3), - ([1, 2, 3], 1, 4), - ([0, 1, 2, 3], 0, 4), - ] - self.assertEqual(actual, expected) - - def test_strings(self): - sequence = 'abc' - actual = list(mi.substrings_indexes(sequence)) - expected = [ - ('a', 0, 1), - ('b', 1, 2), - ('c', 2, 3), - ('ab', 0, 2), - ('bc', 1, 3), - ('abc', 0, 3), - ] - self.assertEqual(actual, expected) - - def test_empty(self): - sequence = [] - actual = list(mi.substrings_indexes(sequence)) - expected = [] - self.assertEqual(actual, expected) - - def test_order(self): - sequence = [2, 0, 1] - actual = list(mi.substrings_indexes(sequence)) - expected = [ - ([2], 0, 1), - ([0], 1, 2), - ([1], 2, 3), - ([2, 0], 0, 2), - ([0, 1], 1, 3), - ([2, 0, 1], 0, 3), - ] - self.assertEqual(actual, expected) - - def test_reverse(self): - sequence = [2, 0, 1] - actual = list(mi.substrings_indexes(sequence, reverse=True)) - expected = [ - ([2, 0, 1], 0, 3), - ([2, 0], 0, 2), - ([0, 1], 1, 3), - ([2], 0, 1), - ([0], 1, 2), - ([1], 2, 3), - ] - 
self.assertEqual(actual, expected) - - -class BucketTests(TestCase): - """Tests for ``bucket()``""" - - def test_basic(self): - iterable = [10, 20, 30, 11, 21, 31, 12, 22, 23, 33] - D = mi.bucket(iterable, key=lambda x: 10 * (x // 10)) - - # In-order access - self.assertEqual(list(D[10]), [10, 11, 12]) - - # Out of order access - self.assertEqual(list(D[30]), [30, 31, 33]) - self.assertEqual(list(D[20]), [20, 21, 22, 23]) - - self.assertEqual(list(D[40]), []) # Nothing in here! - - def test_in(self): - iterable = [10, 20, 30, 11, 21, 31, 12, 22, 23, 33] - D = mi.bucket(iterable, key=lambda x: 10 * (x // 10)) - - self.assertIn(10, D) - self.assertNotIn(40, D) - self.assertIn(20, D) - self.assertNotIn(21, D) - - # Checking in-ness shouldn't advance the iterator - self.assertEqual(next(D[10]), 10) - - def test_validator(self): - iterable = count(0) - key = lambda x: int(str(x)[0]) # First digit of each number - validator = lambda x: 0 < x < 10 # No leading zeros - D = mi.bucket(iterable, key, validator=validator) - self.assertEqual(mi.take(3, D[1]), [1, 10, 11]) - self.assertNotIn(0, D) # Non-valid entries don't return True - self.assertNotIn(0, D._cache) # Don't store non-valid entries - self.assertEqual(list(D[0]), []) - - -class SpyTests(TestCase): - """Tests for ``spy()``""" - - def test_basic(self): - original_iterable = iter('abcdefg') - head, new_iterable = mi.spy(original_iterable) - self.assertEqual(head, ['a']) - self.assertEqual( - list(new_iterable), ['a', 'b', 'c', 'd', 'e', 'f', 'g'] - ) - - def test_unpacking(self): - original_iterable = iter('abcdefg') - (first, second, third), new_iterable = mi.spy(original_iterable, 3) - self.assertEqual(first, 'a') - self.assertEqual(second, 'b') - self.assertEqual(third, 'c') - self.assertEqual( - list(new_iterable), ['a', 'b', 'c', 'd', 'e', 'f', 'g'] - ) - - def test_too_many(self): - original_iterable = iter('abc') - head, new_iterable = mi.spy(original_iterable, 4) - self.assertEqual(head, ['a', 'b', 'c']) - self.assertEqual(list(new_iterable), ['a', 'b', 'c']) - - def test_zero(self): - original_iterable = iter('abc') - head, new_iterable = mi.spy(original_iterable, 0) - self.assertEqual(head, []) - self.assertEqual(list(new_iterable), ['a', 'b', 'c']) - - -class InterleaveTests(TestCase): - def test_even(self): - actual = list(mi.interleave([1, 4, 7], [2, 5, 8], [3, 6, 9])) - expected = [1, 2, 3, 4, 5, 6, 7, 8, 9] - self.assertEqual(actual, expected) - - def test_short(self): - actual = list(mi.interleave([1, 4], [2, 5, 7], [3, 6, 8])) - expected = [1, 2, 3, 4, 5, 6] - self.assertEqual(actual, expected) - - def test_mixed_types(self): - it_list = ['a', 'b', 'c', 'd'] - it_str = '12345' - it_inf = count() - actual = list(mi.interleave(it_list, it_str, it_inf)) - expected = ['a', '1', 0, 'b', '2', 1, 'c', '3', 2, 'd', '4', 3] - self.assertEqual(actual, expected) - - -class InterleaveLongestTests(TestCase): - def test_even(self): - actual = list(mi.interleave_longest([1, 4, 7], [2, 5, 8], [3, 6, 9])) - expected = [1, 2, 3, 4, 5, 6, 7, 8, 9] - self.assertEqual(actual, expected) - - def test_short(self): - actual = list(mi.interleave_longest([1, 4], [2, 5, 7], [3, 6, 8])) - expected = [1, 2, 3, 4, 5, 6, 7, 8] - self.assertEqual(actual, expected) - - def test_mixed_types(self): - it_list = ['a', 'b', 'c', 'd'] - it_str = '12345' - it_gen = (x for x in range(3)) - actual = list(mi.interleave_longest(it_list, it_str, it_gen)) - expected = ['a', '1', 0, 'b', '2', 1, 'c', '3', 2, 'd', '4', '5'] - self.assertEqual(actual, expected) - - -class 
TestCollapse(TestCase): - """Tests for ``collapse()``""" - - def test_collapse(self): - l = [[1], 2, [[3], 4], [[[5]]]] - self.assertEqual(list(mi.collapse(l)), [1, 2, 3, 4, 5]) - - def test_collapse_to_string(self): - l = [["s1"], "s2", [["s3"], "s4"], [[["s5"]]]] - self.assertEqual(list(mi.collapse(l)), ["s1", "s2", "s3", "s4", "s5"]) - - def test_collapse_to_bytes(self): - l = [[b"s1"], b"s2", [[b"s3"], b"s4"], [[[b"s5"]]]] - self.assertEqual( - list(mi.collapse(l)), [b"s1", b"s2", b"s3", b"s4", b"s5"] - ) - - def test_collapse_flatten(self): - l = [[1], [2], [[3], 4], [[[5]]]] - self.assertEqual(list(mi.collapse(l, levels=1)), list(mi.flatten(l))) - - def test_collapse_to_level(self): - l = [[1], 2, [[3], 4], [[[5]]]] - self.assertEqual(list(mi.collapse(l, levels=2)), [1, 2, 3, 4, [5]]) - self.assertEqual( - list(mi.collapse(mi.collapse(l, levels=1), levels=1)), - list(mi.collapse(l, levels=2)) - ) - - def test_collapse_to_list(self): - l = (1, [2], (3, [4, (5,)], 'ab')) - actual = list(mi.collapse(l, base_type=list)) - expected = [1, [2], 3, [4, (5,)], 'ab'] - self.assertEqual(actual, expected) - - -class SideEffectTests(TestCase): - """Tests for ``side_effect()``""" - - def test_individual(self): - # The function increments the counter for each call - counter = [0] - - def func(arg): - counter[0] += 1 - - result = list(mi.side_effect(func, range(10))) - self.assertEqual(result, list(range(10))) - self.assertEqual(counter[0], 10) - - def test_chunked(self): - # The function increments the counter for each call - counter = [0] - - def func(arg): - counter[0] += 1 - - result = list(mi.side_effect(func, range(10), 2)) - self.assertEqual(result, list(range(10))) - self.assertEqual(counter[0], 5) - - def test_before_after(self): - f = StringIO() - collector = [] - - def func(item): - print(item, file=f) - collector.append(f.getvalue()) - - def it(): - yield 'a' - yield 'b' - raise RuntimeError('kaboom') - - before = lambda: print('HEADER', file=f) - after = f.close - - try: - mi.consume(mi.side_effect(func, it(), before=before, after=after)) - except RuntimeError: - pass - - # The iterable should have been written to the file - self.assertEqual(collector, ['HEADER\na\n', 'HEADER\na\nb\n']) - - # The file should be closed even though something bad happened - self.assertTrue(f.closed) - - def test_before_fails(self): - f = StringIO() - func = lambda x: print(x, file=f) - - def before(): - raise RuntimeError('ouch') - - try: - mi.consume( - mi.side_effect(func, 'abc', before=before, after=f.close) - ) - except RuntimeError: - pass - - # The file should be closed even though something bad happened in the - # before function - self.assertTrue(f.closed) - - -class SlicedTests(TestCase): - """Tests for ``sliced()``""" - - def test_even(self): - """Test when the length of the sequence is divisible by *n*""" - seq = 'ABCDEFGHI' - self.assertEqual(list(mi.sliced(seq, 3)), ['ABC', 'DEF', 'GHI']) - - def test_odd(self): - """Test when the length of the sequence is not divisible by *n*""" - seq = 'ABCDEFGHI' - self.assertEqual(list(mi.sliced(seq, 4)), ['ABCD', 'EFGH', 'I']) - - def test_not_sliceable(self): - seq = (x for x in 'ABCDEFGHI') - - with self.assertRaises(TypeError): - list(mi.sliced(seq, 3)) - - -class SplitAtTests(TestCase): - """Tests for ``split()``""" - - def comp_with_str_split(self, str_to_split, delim): - pred = lambda c: c == delim - actual = list(map(''.join, mi.split_at(str_to_split, pred))) - expected = str_to_split.split(delim) - self.assertEqual(actual, expected) - - def 
test_seperators(self): - test_strs = ['', 'abcba', 'aaabbbcccddd', 'e'] - for s, delim in product(test_strs, 'abcd'): - self.comp_with_str_split(s, delim) - - -class SplitBeforeTest(TestCase): - """Tests for ``split_before()``""" - - def test_starts_with_sep(self): - actual = list(mi.split_before('xooxoo', lambda c: c == 'x')) - expected = [['x', 'o', 'o'], ['x', 'o', 'o']] - self.assertEqual(actual, expected) - - def test_ends_with_sep(self): - actual = list(mi.split_before('ooxoox', lambda c: c == 'x')) - expected = [['o', 'o'], ['x', 'o', 'o'], ['x']] - self.assertEqual(actual, expected) - - def test_no_sep(self): - actual = list(mi.split_before('ooo', lambda c: c == 'x')) - expected = [['o', 'o', 'o']] - self.assertEqual(actual, expected) - - -class SplitAfterTest(TestCase): - """Tests for ``split_after()``""" - - def test_starts_with_sep(self): - actual = list(mi.split_after('xooxoo', lambda c: c == 'x')) - expected = [['x'], ['o', 'o', 'x'], ['o', 'o']] - self.assertEqual(actual, expected) - - def test_ends_with_sep(self): - actual = list(mi.split_after('ooxoox', lambda c: c == 'x')) - expected = [['o', 'o', 'x'], ['o', 'o', 'x']] - self.assertEqual(actual, expected) - - def test_no_sep(self): - actual = list(mi.split_after('ooo', lambda c: c == 'x')) - expected = [['o', 'o', 'o']] - self.assertEqual(actual, expected) - - -class SplitIntoTests(TestCase): - """Tests for ``split_into()``""" - - def test_iterable_just_right(self): - """Size of ``iterable`` equals the sum of ``sizes``.""" - iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9] - sizes = [2, 3, 4] - expected = [[1, 2], [3, 4, 5], [6, 7, 8, 9]] - actual = list(mi.split_into(iterable, sizes)) - self.assertEqual(actual, expected) - - def test_iterable_too_small(self): - """Size of ``iterable`` is smaller than sum of ``sizes``. Last return - list is shorter as a result.""" - iterable = [1, 2, 3, 4, 5, 6, 7] - sizes = [2, 3, 4] - expected = [[1, 2], [3, 4, 5], [6, 7]] - actual = list(mi.split_into(iterable, sizes)) - self.assertEqual(actual, expected) - - def test_iterable_too_small_extra(self): - """Size of ``iterable`` is smaller than sum of ``sizes``. Second last - return list is shorter and last return list is empty as a result.""" - iterable = [1, 2, 3, 4, 5, 6, 7] - sizes = [2, 3, 4, 5] - expected = [[1, 2], [3, 4, 5], [6, 7], []] - actual = list(mi.split_into(iterable, sizes)) - self.assertEqual(actual, expected) - - def test_iterable_too_large(self): - """Size of ``iterable`` is larger than sum of ``sizes``. Not all - items of iterable are returned.""" - iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9] - sizes = [2, 3, 2] - expected = [[1, 2], [3, 4, 5], [6, 7]] - actual = list(mi.split_into(iterable, sizes)) - self.assertEqual(actual, expected) - - def test_using_none_with_leftover(self): - """Last item of ``sizes`` is None when items still remain in - ``iterable``. Last list returned stretches to fit all remaining items - of ``iterable``.""" - iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9] - sizes = [2, 3, None] - expected = [[1, 2], [3, 4, 5], [6, 7, 8, 9]] - actual = list(mi.split_into(iterable, sizes)) - self.assertEqual(actual, expected) - - def test_using_none_without_leftover(self): - """Last item of ``sizes`` is None when no items remain in - ``iterable``. 
Last list returned is empty.""" - iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9] - sizes = [2, 3, 4, None] - expected = [[1, 2], [3, 4, 5], [6, 7, 8, 9], []] - actual = list(mi.split_into(iterable, sizes)) - self.assertEqual(actual, expected) - - def test_using_none_mid_sizes(self): - """None is present in ``sizes`` but is not the last item. Last list - returned stretches to fit all remaining items of ``iterable`` but - all items in ``sizes`` after None are ignored.""" - iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9] - sizes = [2, 3, None, 4] - expected = [[1, 2], [3, 4, 5], [6, 7, 8, 9]] - actual = list(mi.split_into(iterable, sizes)) - self.assertEqual(actual, expected) - - def test_iterable_empty(self): - """``iterable`` argument is empty but ``sizes`` is not. An empty - list is returned for each item in ``sizes``.""" - iterable = [] - sizes = [2, 4, 2] - expected = [[], [], []] - actual = list(mi.split_into(iterable, sizes)) - self.assertEqual(actual, expected) - - def test_iterable_empty_using_none(self): - """``iterable`` argument is empty but ``sizes`` is not. An empty - list is returned for each item in ``sizes`` that is not after a - None item.""" - iterable = [] - sizes = [2, 4, None, 2] - expected = [[], [], []] - actual = list(mi.split_into(iterable, sizes)) - self.assertEqual(actual, expected) - - def test_sizes_empty(self): - """``sizes`` argument is empty but ``iterable`` is not. An empty - generator is returned.""" - iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9] - sizes = [] - expected = [] - actual = list(mi.split_into(iterable, sizes)) - self.assertEqual(actual, expected) - - def test_both_empty(self): - """Both ``sizes`` and ``iterable`` arguments are empty. An empty - generator is returned.""" - iterable = [] - sizes = [] - expected = [] - actual = list(mi.split_into(iterable, sizes)) - self.assertEqual(actual, expected) - - def test_bool_in_sizes(self): - """A bool object is present in ``sizes`` is treated as a 1 or 0 for - ``True`` or ``False`` due to bool being an instance of int.""" - iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9] - sizes = [3, True, 2, False] - expected = [[1, 2, 3], [4], [5, 6], []] - actual = list(mi.split_into(iterable, sizes)) - self.assertEqual(actual, expected) - - def test_invalid_in_sizes(self): - """A ValueError is raised if an object in ``sizes`` is neither ``None`` - or an integer.""" - iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9] - sizes = [1, [], 3] - with self.assertRaises(ValueError): - list(mi.split_into(iterable, sizes)) - - def test_invalid_in_sizes_after_none(self): - """A item in ``sizes`` that is invalid will not raise a TypeError if it - comes after a ``None`` item.""" - iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9] - sizes = [3, 4, None, []] - expected = [[1, 2, 3], [4, 5, 6, 7], [8, 9]] - actual = list(mi.split_into(iterable, sizes)) - self.assertEqual(actual, expected) - - def test_generator_iterable_integrity(self): - """Check that if ``iterable`` is an iterator, it is consumed only by as - many items as the sum of ``sizes``.""" - iterable = (i for i in range(10)) - sizes = [2, 3] - - expected = [[0, 1], [2, 3, 4]] - actual = list(mi.split_into(iterable, sizes)) - self.assertEqual(actual, expected) - - iterable_expected = [5, 6, 7, 8, 9] - iterable_actual = list(iterable) - self.assertEqual(iterable_actual, iterable_expected) - - def test_generator_sizes_integrity(self): - """Check that if ``sizes`` is an iterator, it is consumed only until a - ``None`` item is reached""" - iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9] - sizes = (i for i in [1, 2, None, 3, 4]) - - expected 
= [[1], [2, 3], [4, 5, 6, 7, 8, 9]] - actual = list(mi.split_into(iterable, sizes)) - self.assertEqual(actual, expected) - - sizes_expected = [3, 4] - sizes_actual = list(sizes) - self.assertEqual(sizes_actual, sizes_expected) - - -class PaddedTest(TestCase): - """Tests for ``padded()``""" - - def test_no_n(self): - seq = [1, 2, 3] - - # No fillvalue - self.assertEqual(mi.take(5, mi.padded(seq)), [1, 2, 3, None, None]) - - # With fillvalue - self.assertEqual( - mi.take(5, mi.padded(seq, fillvalue='')), [1, 2, 3, '', ''] - ) - - def test_invalid_n(self): - self.assertRaises(ValueError, lambda: list(mi.padded([1, 2, 3], n=-1))) - self.assertRaises(ValueError, lambda: list(mi.padded([1, 2, 3], n=0))) - - def test_valid_n(self): - seq = [1, 2, 3, 4, 5] - - # No need for padding: len(seq) <= n - self.assertEqual(list(mi.padded(seq, n=4)), [1, 2, 3, 4, 5]) - self.assertEqual(list(mi.padded(seq, n=5)), [1, 2, 3, 4, 5]) - - # No fillvalue - self.assertEqual( - list(mi.padded(seq, n=7)), [1, 2, 3, 4, 5, None, None] - ) - - # With fillvalue - self.assertEqual( - list(mi.padded(seq, fillvalue='', n=7)), [1, 2, 3, 4, 5, '', ''] - ) - - def test_next_multiple(self): - seq = [1, 2, 3, 4, 5, 6] - - # No need for padding: len(seq) % n == 0 - self.assertEqual( - list(mi.padded(seq, n=3, next_multiple=True)), [1, 2, 3, 4, 5, 6] - ) - - # Padding needed: len(seq) < n - self.assertEqual( - list(mi.padded(seq, n=8, next_multiple=True)), - [1, 2, 3, 4, 5, 6, None, None] - ) - - # No padding needed: len(seq) == n - self.assertEqual( - list(mi.padded(seq, n=6, next_multiple=True)), [1, 2, 3, 4, 5, 6] - ) - - # Padding needed: len(seq) > n - self.assertEqual( - list(mi.padded(seq, n=4, next_multiple=True)), - [1, 2, 3, 4, 5, 6, None, None] - ) - - # With fillvalue - self.assertEqual( - list(mi.padded(seq, fillvalue='', n=4, next_multiple=True)), - [1, 2, 3, 4, 5, 6, '', ''] - ) - - -class DistributeTest(TestCase): - """Tests for distribute()""" - - def test_invalid_n(self): - self.assertRaises(ValueError, lambda: mi.distribute(-1, [1, 2, 3])) - self.assertRaises(ValueError, lambda: mi.distribute(0, [1, 2, 3])) - - def test_basic(self): - iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] - - for n, expected in [ - (1, [iterable]), - (2, [[1, 3, 5, 7, 9], [2, 4, 6, 8, 10]]), - (3, [[1, 4, 7, 10], [2, 5, 8], [3, 6, 9]]), - (10, [[n] for n in range(1, 10 + 1)]), - ]: - self.assertEqual( - [list(x) for x in mi.distribute(n, iterable)], expected - ) - - def test_large_n(self): - iterable = [1, 2, 3, 4] - self.assertEqual( - [list(x) for x in mi.distribute(6, iterable)], - [[1], [2], [3], [4], [], []] - ) - - -class StaggerTest(TestCase): - """Tests for ``stagger()``""" - - def test_default(self): - iterable = [0, 1, 2, 3] - actual = list(mi.stagger(iterable)) - expected = [(None, 0, 1), (0, 1, 2), (1, 2, 3)] - self.assertEqual(actual, expected) - - def test_offsets(self): - iterable = [0, 1, 2, 3] - for offsets, expected in [ - ((-2, 0, 2), [('', 0, 2), ('', 1, 3)]), - ((-2, -1), [('', ''), ('', 0), (0, 1), (1, 2), (2, 3)]), - ((1, 2), [(1, 2), (2, 3)]), - ]: - all_groups = mi.stagger(iterable, offsets=offsets, fillvalue='') - self.assertEqual(list(all_groups), expected) - - def test_longest(self): - iterable = [0, 1, 2, 3] - for offsets, expected in [ - ( - (-1, 0, 1), - [('', 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, ''), (3, '', '')] - ), - ((-2, -1), [('', ''), ('', 0), (0, 1), (1, 2), (2, 3), (3, '')]), - ((1, 2), [(1, 2), (2, 3), (3, '')]), - ]: - all_groups = mi.stagger( - iterable, offsets=offsets, fillvalue='', longest=True 
- ) - self.assertEqual(list(all_groups), expected) - - -class ZipOffsetTest(TestCase): - """Tests for ``zip_offset()``""" - - def test_shortest(self): - a_1 = [0, 1, 2, 3] - a_2 = [0, 1, 2, 3, 4, 5] - a_3 = [0, 1, 2, 3, 4, 5, 6, 7] - actual = list( - mi.zip_offset(a_1, a_2, a_3, offsets=(-1, 0, 1), fillvalue='') - ) - expected = [('', 0, 1), (0, 1, 2), (1, 2, 3), (2, 3, 4), (3, 4, 5)] - self.assertEqual(actual, expected) - - def test_longest(self): - a_1 = [0, 1, 2, 3] - a_2 = [0, 1, 2, 3, 4, 5] - a_3 = [0, 1, 2, 3, 4, 5, 6, 7] - actual = list( - mi.zip_offset(a_1, a_2, a_3, offsets=(-1, 0, 1), longest=True) - ) - expected = [ - (None, 0, 1), - (0, 1, 2), - (1, 2, 3), - (2, 3, 4), - (3, 4, 5), - (None, 5, 6), - (None, None, 7), - ] - self.assertEqual(actual, expected) - - def test_mismatch(self): - iterables = [0, 1, 2], [2, 3, 4] - offsets = (-1, 0, 1) - self.assertRaises( - ValueError, - lambda: list(mi.zip_offset(*iterables, offsets=offsets)) - ) - - -class UnzipTests(TestCase): - """Tests for unzip()""" - - def test_empty_iterable(self): - self.assertEqual(list(mi.unzip([])), []) - # in reality zip([], [], []) is equivalent to iter([]) - # but it doesn't hurt to test both - self.assertEqual(list(mi.unzip(zip([], [], []))), []) - - def test_length_one_iterable(self): - xs, ys, zs = mi.unzip(zip([1], [2], [3])) - self.assertEqual(list(xs), [1]) - self.assertEqual(list(ys), [2]) - self.assertEqual(list(zs), [3]) - - def test_normal_case(self): - xs, ys, zs = range(10), range(1, 11), range(2, 12) - zipped = zip(xs, ys, zs) - xs, ys, zs = mi.unzip(zipped) - self.assertEqual(list(xs), list(range(10))) - self.assertEqual(list(ys), list(range(1, 11))) - self.assertEqual(list(zs), list(range(2, 12))) - - def test_improperly_zipped(self): - zipped = iter([(1, 2, 3), (4, 5), (6,)]) - xs, ys, zs = mi.unzip(zipped) - self.assertEqual(list(xs), [1, 4, 6]) - self.assertEqual(list(ys), [2, 5]) - self.assertEqual(list(zs), [3]) - - def test_increasingly_zipped(self): - zipped = iter([(1, 2), (3, 4, 5), (6, 7, 8, 9)]) - unzipped = mi.unzip(zipped) - # from the docstring: - # len(first tuple) is the number of iterables zipped - self.assertEqual(len(unzipped), 2) - xs, ys = unzipped - self.assertEqual(list(xs), [1, 3, 6]) - self.assertEqual(list(ys), [2, 4, 7]) - - -class SortTogetherTest(TestCase): - """Tests for sort_together()""" - - def test_key_list(self): - """tests `key_list` including default, iterables include duplicates""" - iterables = [ - ['GA', 'GA', 'GA', 'CT', 'CT', 'CT'], - ['May', 'Aug.', 'May', 'June', 'July', 'July'], - [97, 20, 100, 70, 100, 20] - ] - - self.assertEqual( - mi.sort_together(iterables), - [ - ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'), - ('June', 'July', 'July', 'May', 'Aug.', 'May'), - (70, 100, 20, 97, 20, 100) - ] - ) - - self.assertEqual( - mi.sort_together(iterables, key_list=(0, 1)), - [ - ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'), - ('July', 'July', 'June', 'Aug.', 'May', 'May'), - (100, 20, 70, 20, 97, 100) - ] - ) - - self.assertEqual( - mi.sort_together(iterables, key_list=(0, 1, 2)), - [ - ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'), - ('July', 'July', 'June', 'Aug.', 'May', 'May'), - (20, 100, 70, 20, 97, 100) - ] - ) - - self.assertEqual( - mi.sort_together(iterables, key_list=(2,)), - [ - ('GA', 'CT', 'CT', 'GA', 'GA', 'CT'), - ('Aug.', 'July', 'June', 'May', 'May', 'July'), - (20, 20, 70, 97, 100, 100) - ] - ) - - def test_invalid_key_list(self): - """tests `key_list` for indexes not available in `iterables`""" - iterables = [ - ['GA', 'GA', 'GA', 'CT', 'CT', 'CT'], - 
['May', 'Aug.', 'May', 'June', 'July', 'July'], - [97, 20, 100, 70, 100, 20] - ] - - self.assertRaises( - IndexError, lambda: mi.sort_together(iterables, key_list=(5,)) - ) - - def test_reverse(self): - """tests `reverse` to ensure a reverse sort for `key_list` iterables""" - iterables = [ - ['GA', 'GA', 'GA', 'CT', 'CT', 'CT'], - ['May', 'Aug.', 'May', 'June', 'July', 'July'], - [97, 20, 100, 70, 100, 20] - ] - - self.assertEqual( - mi.sort_together(iterables, key_list=(0, 1, 2), reverse=True), - [('GA', 'GA', 'GA', 'CT', 'CT', 'CT'), - ('May', 'May', 'Aug.', 'June', 'July', 'July'), - (100, 97, 20, 70, 100, 20)] - ) - - def test_uneven_iterables(self): - """tests trimming of iterables to the shortest length before sorting""" - iterables = [['GA', 'GA', 'GA', 'CT', 'CT', 'CT', 'MA'], - ['May', 'Aug.', 'May', 'June', 'July', 'July'], - [97, 20, 100, 70, 100, 20, 0]] - - self.assertEqual( - mi.sort_together(iterables), - [ - ('CT', 'CT', 'CT', 'GA', 'GA', 'GA'), - ('June', 'July', 'July', 'May', 'Aug.', 'May'), - (70, 100, 20, 97, 20, 100) - ] - ) - - -class DivideTest(TestCase): - """Tests for divide()""" - - def test_invalid_n(self): - self.assertRaises(ValueError, lambda: mi.divide(-1, [1, 2, 3])) - self.assertRaises(ValueError, lambda: mi.divide(0, [1, 2, 3])) - - def test_basic(self): - iterable = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] - - for n, expected in [ - (1, [iterable]), - (2, [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]]), - (3, [[1, 2, 3, 4], [5, 6, 7], [8, 9, 10]]), - (10, [[n] for n in range(1, 10 + 1)]), - ]: - self.assertEqual( - [list(x) for x in mi.divide(n, iterable)], expected - ) - - def test_large_n(self): - iterable = [1, 2, 3, 4] - self.assertEqual( - [list(x) for x in mi.divide(6, iterable)], - [[1], [2], [3], [4], [], []] - ) - - -class TestAlwaysIterable(TestCase): - """Tests for always_iterable()""" - def test_single(self): - self.assertEqual(list(mi.always_iterable(1)), [1]) - - def test_strings(self): - for obj in ['foo', b'bar', 'baz']: - actual = list(mi.always_iterable(obj)) - expected = [obj] - self.assertEqual(actual, expected) - - def test_base_type(self): - dict_obj = {'a': 1, 'b': 2} - str_obj = '123' - - # Default: dicts are iterable like they normally are - default_actual = list(mi.always_iterable(dict_obj)) - default_expected = list(dict_obj) - self.assertEqual(default_actual, default_expected) - - # Unitary types set: dicts are not iterable - custom_actual = list(mi.always_iterable(dict_obj, base_type=dict)) - custom_expected = [dict_obj] - self.assertEqual(custom_actual, custom_expected) - - # With unitary types set, strings are iterable - str_actual = list(mi.always_iterable(str_obj, base_type=None)) - str_expected = list(str_obj) - self.assertEqual(str_actual, str_expected) - - def test_iterables(self): - self.assertEqual(list(mi.always_iterable([0, 1])), [0, 1]) - self.assertEqual( - list(mi.always_iterable([0, 1], base_type=list)), [[0, 1]] - ) - self.assertEqual( - list(mi.always_iterable(iter('foo'))), ['f', 'o', 'o'] - ) - self.assertEqual(list(mi.always_iterable([])), []) - - def test_none(self): - self.assertEqual(list(mi.always_iterable(None)), []) - - def test_generator(self): - def _gen(): - yield 0 - yield 1 - - self.assertEqual(list(mi.always_iterable(_gen())), [0, 1]) - - -class AdjacentTests(TestCase): - def test_typical(self): - actual = list(mi.adjacent(lambda x: x % 5 == 0, range(10))) - expected = [(True, 0), (True, 1), (False, 2), (False, 3), (True, 4), - (True, 5), (True, 6), (False, 7), (False, 8), (False, 9)] - self.assertEqual(actual, 
expected) - - def test_empty_iterable(self): - actual = list(mi.adjacent(lambda x: x % 5 == 0, [])) - expected = [] - self.assertEqual(actual, expected) - - def test_length_one(self): - actual = list(mi.adjacent(lambda x: x % 5 == 0, [0])) - expected = [(True, 0)] - self.assertEqual(actual, expected) - - actual = list(mi.adjacent(lambda x: x % 5 == 0, [1])) - expected = [(False, 1)] - self.assertEqual(actual, expected) - - def test_consecutive_true(self): - """Test that when the predicate matches multiple consecutive elements - it doesn't repeat elements in the output""" - actual = list(mi.adjacent(lambda x: x % 5 < 2, range(10))) - expected = [(True, 0), (True, 1), (True, 2), (False, 3), (True, 4), - (True, 5), (True, 6), (True, 7), (False, 8), (False, 9)] - self.assertEqual(actual, expected) - - def test_distance(self): - actual = list(mi.adjacent(lambda x: x % 5 == 0, range(10), distance=2)) - expected = [(True, 0), (True, 1), (True, 2), (True, 3), (True, 4), - (True, 5), (True, 6), (True, 7), (False, 8), (False, 9)] - self.assertEqual(actual, expected) - - actual = list(mi.adjacent(lambda x: x % 5 == 0, range(10), distance=3)) - expected = [(True, 0), (True, 1), (True, 2), (True, 3), (True, 4), - (True, 5), (True, 6), (True, 7), (True, 8), (False, 9)] - self.assertEqual(actual, expected) - - def test_large_distance(self): - """Test distance larger than the length of the iterable""" - iterable = range(10) - actual = list(mi.adjacent(lambda x: x % 5 == 4, iterable, distance=20)) - expected = list(zip(repeat(True), iterable)) - self.assertEqual(actual, expected) - - actual = list(mi.adjacent(lambda x: False, iterable, distance=20)) - expected = list(zip(repeat(False), iterable)) - self.assertEqual(actual, expected) - - def test_zero_distance(self): - """Test that adjacent() reduces to zip+map when distance is 0""" - iterable = range(1000) - predicate = lambda x: x % 4 == 2 - actual = mi.adjacent(predicate, iterable, 0) - expected = zip(map(predicate, iterable), iterable) - self.assertTrue(all(a == e for a, e in zip(actual, expected))) - - def test_negative_distance(self): - """Test that adjacent() raises an error with negative distance""" - pred = lambda x: x - self.assertRaises( - ValueError, lambda: mi.adjacent(pred, range(1000), -1) - ) - self.assertRaises( - ValueError, lambda: mi.adjacent(pred, range(10), -10) - ) - - def test_grouping(self): - """Test interaction of adjacent() with groupby_transform()""" - iterable = mi.adjacent(lambda x: x % 5 == 0, range(10)) - grouper = mi.groupby_transform(iterable, itemgetter(0), itemgetter(1)) - actual = [(k, list(g)) for k, g in grouper] - expected = [ - (True, [0, 1]), - (False, [2, 3]), - (True, [4, 5, 6]), - (False, [7, 8, 9]), - ] - self.assertEqual(actual, expected) - - def test_call_once(self): - """Test that the predicate is only called once per item.""" - already_seen = set() - iterable = range(10) - - def predicate(item): - self.assertNotIn(item, already_seen) - already_seen.add(item) - return True - - actual = list(mi.adjacent(predicate, iterable)) - expected = [(True, x) for x in iterable] - self.assertEqual(actual, expected) - - -class GroupByTransformTests(TestCase): - def assertAllGroupsEqual(self, groupby1, groupby2): - """Compare two groupby objects for equality, both keys and groups.""" - for a, b in zip(groupby1, groupby2): - key1, group1 = a - key2, group2 = b - self.assertEqual(key1, key2) - self.assertListEqual(list(group1), list(group2)) - self.assertRaises(StopIteration, lambda: next(groupby1)) - 
self.assertRaises(StopIteration, lambda: next(groupby2)) - - def test_default_funcs(self): - """Test that groupby_transform() with default args mimics groupby()""" - iterable = [(x // 5, x) for x in range(1000)] - actual = mi.groupby_transform(iterable) - expected = groupby(iterable) - self.assertAllGroupsEqual(actual, expected) - - def test_valuefunc(self): - iterable = [(int(x / 5), int(x / 3), x) for x in range(10)] - - # Test the standard usage of grouping one iterable using another's keys - grouper = mi.groupby_transform( - iterable, keyfunc=itemgetter(0), valuefunc=itemgetter(-1) - ) - actual = [(k, list(g)) for k, g in grouper] - expected = [(0, [0, 1, 2, 3, 4]), (1, [5, 6, 7, 8, 9])] - self.assertEqual(actual, expected) - - grouper = mi.groupby_transform( - iterable, keyfunc=itemgetter(1), valuefunc=itemgetter(-1) - ) - actual = [(k, list(g)) for k, g in grouper] - expected = [(0, [0, 1, 2]), (1, [3, 4, 5]), (2, [6, 7, 8]), (3, [9])] - self.assertEqual(actual, expected) - - # and now for something a little different - d = dict(zip(range(10), 'abcdefghij')) - grouper = mi.groupby_transform( - range(10), keyfunc=lambda x: x // 5, valuefunc=d.get - ) - actual = [(k, ''.join(g)) for k, g in grouper] - expected = [(0, 'abcde'), (1, 'fghij')] - self.assertEqual(actual, expected) - - def test_no_valuefunc(self): - iterable = range(1000) - - def key(x): - return x // 5 - - actual = mi.groupby_transform(iterable, key, valuefunc=None) - expected = groupby(iterable, key) - self.assertAllGroupsEqual(actual, expected) - - actual = mi.groupby_transform(iterable, key) # default valuefunc - expected = groupby(iterable, key) - self.assertAllGroupsEqual(actual, expected) - - -class NumericRangeTests(TestCase): - def test_basic(self): - for args, expected in [ - ((4,), [0, 1, 2, 3]), - ((4.0,), [0.0, 1.0, 2.0, 3.0]), - ((1.0, 4), [1.0, 2.0, 3.0]), - ((1, 4.0), [1, 2, 3]), - ((1.0, 5), [1.0, 2.0, 3.0, 4.0]), - ((0, 20, 5), [0, 5, 10, 15]), - ((0, 20, 5.0), [0.0, 5.0, 10.0, 15.0]), - ((0, 10, 3), [0, 3, 6, 9]), - ((0, 10, 3.0), [0.0, 3.0, 6.0, 9.0]), - ((0, -5, -1), [0, -1, -2, -3, -4]), - ((0.0, -5, -1), [0.0, -1.0, -2.0, -3.0, -4.0]), - ((1, 2, Fraction(1, 2)), [Fraction(1, 1), Fraction(3, 2)]), - ((0,), []), - ((0.0,), []), - ((1, 0), []), - ((1.0, 0.0), []), - ((Fraction(2, 1),), [Fraction(0, 1), Fraction(1, 1)]), - ((Decimal('2.0'),), [Decimal('0.0'), Decimal('1.0')]), - ( - ( - datetime(2019, 3, 29, 12, 34, 56), - datetime(2019, 3, 29, 12, 37, 55), - timedelta(minutes=1) - ), - [ - datetime(2019, 3, 29, 12, 34, 56), - datetime(2019, 3, 29, 12, 35, 56), - datetime(2019, 3, 29, 12, 36, 56), - ] - ), - ]: - actual = list(mi.numeric_range(*args)) - self.assertEqual(actual, expected) - self.assertTrue( - all(type(a) == type(e) for a, e in zip(actual, expected)) - ) - - def test_arg_count(self): - self.assertRaises(TypeError, lambda: list(mi.numeric_range())) - self.assertRaises( - TypeError, lambda: list(mi.numeric_range(0, 1, 2, 3)) - ) - - def test_zero_step(self): - for args in [ - (1, 2, 0), - ( - datetime(2019, 3, 29, 12, 34, 56), - datetime(2019, 3, 29, 12, 37, 55), - timedelta(minutes=0) - ), - ]: - with self.assertRaises(ValueError): - list(mi.numeric_range(*args)) - - -class CountCycleTests(TestCase): - def test_basic(self): - expected = [ - (0, 'a'), (0, 'b'), (0, 'c'), - (1, 'a'), (1, 'b'), (1, 'c'), - (2, 'a'), (2, 'b'), (2, 'c'), - ] - for actual in [ - mi.take(9, mi.count_cycle('abc')), # n=None - list(mi.count_cycle('abc', 3)), # n=3 - ]: - self.assertEqual(actual, expected) - - def 
test_empty(self): - self.assertEqual(list(mi.count_cycle('')), []) - self.assertEqual(list(mi.count_cycle('', 2)), []) - - def test_negative(self): - self.assertEqual(list(mi.count_cycle('abc', -3)), []) - - -class LocateTests(TestCase): - def test_default_pred(self): - iterable = [0, 1, 1, 0, 1, 0, 0] - actual = list(mi.locate(iterable)) - expected = [1, 2, 4] - self.assertEqual(actual, expected) - - def test_no_matches(self): - iterable = [0, 0, 0] - actual = list(mi.locate(iterable)) - expected = [] - self.assertEqual(actual, expected) - - def test_custom_pred(self): - iterable = ['0', 1, 1, '0', 1, '0', '0'] - pred = lambda x: x == '0' - actual = list(mi.locate(iterable, pred)) - expected = [0, 3, 5, 6] - self.assertEqual(actual, expected) - - def test_window_size(self): - iterable = ['0', 1, 1, '0', 1, '0', '0'] - pred = lambda *args: args == ('0', 1) - actual = list(mi.locate(iterable, pred, window_size=2)) - expected = [0, 3] - self.assertEqual(actual, expected) - - def test_window_size_large(self): - iterable = [1, 2, 3, 4] - pred = lambda a, b, c, d, e: True - actual = list(mi.locate(iterable, pred, window_size=5)) - expected = [0] - self.assertEqual(actual, expected) - - def test_window_size_zero(self): - iterable = [1, 2, 3, 4] - pred = lambda: True - with self.assertRaises(ValueError): - list(mi.locate(iterable, pred, window_size=0)) - - -class StripFunctionTests(TestCase): - def test_hashable(self): - iterable = list('www.example.com') - pred = lambda x: x in set('cmowz.') - - self.assertEqual(list(mi.lstrip(iterable, pred)), list('example.com')) - self.assertEqual(list(mi.rstrip(iterable, pred)), list('www.example')) - self.assertEqual(list(mi.strip(iterable, pred)), list('example')) - - def test_not_hashable(self): - iterable = [ - list('http://'), list('www'), list('.example'), list('.com') - ] - pred = lambda x: x in [list('http://'), list('www'), list('.com')] - - self.assertEqual(list(mi.lstrip(iterable, pred)), iterable[2:]) - self.assertEqual(list(mi.rstrip(iterable, pred)), iterable[:3]) - self.assertEqual(list(mi.strip(iterable, pred)), iterable[2: 3]) - - def test_math(self): - iterable = [0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2] - pred = lambda x: x <= 2 - - self.assertEqual(list(mi.lstrip(iterable, pred)), iterable[3:]) - self.assertEqual(list(mi.rstrip(iterable, pred)), iterable[:-3]) - self.assertEqual(list(mi.strip(iterable, pred)), iterable[3:-3]) - - -class IsliceExtendedTests(TestCase): - def test_all(self): - iterable = ['0', '1', '2', '3', '4', '5'] - indexes = list(range(-4, len(iterable) + 4)) + [None] - steps = [1, 2, 3, 4, -1, -2, -3, 4] - for slice_args in product(indexes, indexes, steps): - try: - actual = list(mi.islice_extended(iterable, *slice_args)) - except Exception as e: - self.fail((slice_args, e)) - - expected = iterable[slice(*slice_args)] - self.assertEqual(actual, expected, slice_args) - - def test_zero_step(self): - with self.assertRaises(ValueError): - list(mi.islice_extended([1, 2, 3], 0, 1, 0)) - - -class ConsecutiveGroupsTest(TestCase): - def test_numbers(self): - iterable = [-10, -8, -7, -6, 1, 2, 4, 5, -1, 7] - actual = [list(g) for g in mi.consecutive_groups(iterable)] - expected = [[-10], [-8, -7, -6], [1, 2], [4, 5], [-1], [7]] - self.assertEqual(actual, expected) - - def test_custom_ordering(self): - iterable = ['1', '10', '11', '20', '21', '22', '30', '31'] - ordering = lambda x: int(x) - actual = [list(g) for g in mi.consecutive_groups(iterable, ordering)] - expected = [['1'], ['10', '11'], ['20', '21', '22'], ['30', '31']] - 
self.assertEqual(actual, expected) - - def test_exotic_ordering(self): - iterable = [ - ('a', 'b', 'c', 'd'), - ('a', 'c', 'b', 'd'), - ('a', 'c', 'd', 'b'), - ('a', 'd', 'b', 'c'), - ('d', 'b', 'c', 'a'), - ('d', 'c', 'a', 'b'), - ] - ordering = list(permutations('abcd')).index - actual = [list(g) for g in mi.consecutive_groups(iterable, ordering)] - expected = [ - [('a', 'b', 'c', 'd')], - [('a', 'c', 'b', 'd'), ('a', 'c', 'd', 'b'), ('a', 'd', 'b', 'c')], - [('d', 'b', 'c', 'a'), ('d', 'c', 'a', 'b')], - ] - self.assertEqual(actual, expected) - - -class DifferenceTest(TestCase): - def test_normal(self): - iterable = [10, 20, 30, 40, 50] - actual = list(mi.difference(iterable)) - expected = [10, 10, 10, 10, 10] - self.assertEqual(actual, expected) - - def test_custom(self): - iterable = [10, 20, 30, 40, 50] - actual = list(mi.difference(iterable, add)) - expected = [10, 30, 50, 70, 90] - self.assertEqual(actual, expected) - - def test_roundtrip(self): - original = list(range(100)) - accumulated = accumulate(original) - actual = list(mi.difference(accumulated)) - self.assertEqual(actual, original) - - def test_one(self): - self.assertEqual(list(mi.difference([0])), [0]) - - def test_empty(self): - self.assertEqual(list(mi.difference([])), []) - - @skipIf(version_info[:2] < (3, 8), 'accumulate with initial needs 3.8+') - def test_initial(self): - original = list(range(100)) - accumulated = accumulate(original, initial=100) - actual = list(mi.difference(accumulated, initial=100)) - self.assertEqual(actual, original) - - -class SeekableTest(TestCase): - def test_exhaustion_reset(self): - iterable = [str(n) for n in range(10)] - - s = mi.seekable(iterable) - self.assertEqual(list(s), iterable) # Normal iteration - self.assertEqual(list(s), []) # Iterable is exhausted - - s.seek(0) - self.assertEqual(list(s), iterable) # Back in action - - def test_partial_reset(self): - iterable = [str(n) for n in range(10)] - - s = mi.seekable(iterable) - self.assertEqual(mi.take(5, s), iterable[:5]) # Normal iteration - - s.seek(1) - self.assertEqual(list(s), iterable[1:]) # Get the rest of the iterable - - def test_forward(self): - iterable = [str(n) for n in range(10)] - - s = mi.seekable(iterable) - self.assertEqual(mi.take(1, s), iterable[:1]) # Normal iteration - - s.seek(3) # Skip over index 2 - self.assertEqual(list(s), iterable[3:]) # Result is similar to slicing - - s.seek(0) # Back to 0 - self.assertEqual(list(s), iterable) # No difference in result - - def test_past_end(self): - iterable = [str(n) for n in range(10)] - - s = mi.seekable(iterable) - self.assertEqual(mi.take(1, s), iterable[:1]) # Normal iteration - - s.seek(20) - self.assertEqual(list(s), []) # Iterable is exhausted - - s.seek(0) # Back to 0 - self.assertEqual(list(s), iterable) # No difference in result - - def test_elements(self): - iterable = map(str, count()) - - s = mi.seekable(iterable) - mi.take(10, s) - - elements = s.elements() - self.assertEqual( - [elements[i] for i in range(10)], [str(n) for n in range(10)] - ) - self.assertEqual(len(elements), 10) - - mi.take(10, s) - self.assertEqual(list(elements), [str(n) for n in range(20)]) - - -class SequenceViewTests(TestCase): - def test_init(self): - view = mi.SequenceView((1, 2, 3)) - self.assertEqual(repr(view), "SequenceView((1, 2, 3))") - self.assertRaises(TypeError, lambda: mi.SequenceView({})) - - def test_update(self): - seq = [1, 2, 3] - view = mi.SequenceView(seq) - self.assertEqual(len(view), 3) - self.assertEqual(repr(view), "SequenceView([1, 2, 3])") - - seq.pop() 
- self.assertEqual(len(view), 2) - self.assertEqual(repr(view), "SequenceView([1, 2])") - - def test_indexing(self): - seq = ('a', 'b', 'c', 'd', 'e', 'f') - view = mi.SequenceView(seq) - for i in range(-len(seq), len(seq)): - self.assertEqual(view[i], seq[i]) - - def test_slicing(self): - seq = ('a', 'b', 'c', 'd', 'e', 'f') - view = mi.SequenceView(seq) - n = len(seq) - indexes = list(range(-n - 1, n + 1)) + [None] - steps = list(range(-n, n + 1)) - steps.remove(0) - for slice_args in product(indexes, indexes, steps): - i = slice(*slice_args) - self.assertEqual(view[i], seq[i]) - - def test_abc_methods(self): - # collections.Sequence should provide all of this functionality - seq = ('a', 'b', 'c', 'd', 'e', 'f', 'f') - view = mi.SequenceView(seq) - - # __contains__ - self.assertIn('b', view) - self.assertNotIn('g', view) - - # __iter__ - self.assertEqual(list(iter(view)), list(seq)) - - # __reversed__ - self.assertEqual(list(reversed(view)), list(reversed(seq))) - - # index - self.assertEqual(view.index('b'), 1) - - # count - self.assertEqual(seq.count('f'), 2) - - -class RunLengthTest(TestCase): - def test_encode(self): - iterable = (int(str(n)[0]) for n in count(800)) - actual = mi.take(4, mi.run_length.encode(iterable)) - expected = [(8, 100), (9, 100), (1, 1000), (2, 1000)] - self.assertEqual(actual, expected) - - def test_decode(self): - iterable = [('d', 4), ('c', 3), ('b', 2), ('a', 1)] - actual = ''.join(mi.run_length.decode(iterable)) - expected = 'ddddcccbba' - self.assertEqual(actual, expected) - - -class ExactlyNTests(TestCase): - """Tests for ``exactly_n()``""" - - def test_true(self): - """Iterable has ``n`` ``True`` elements""" - self.assertTrue(mi.exactly_n([True, False, True], 2)) - self.assertTrue(mi.exactly_n([1, 1, 1, 0], 3)) - self.assertTrue(mi.exactly_n([False, False], 0)) - self.assertTrue(mi.exactly_n(range(100), 10, lambda x: x < 10)) - - def test_false(self): - """Iterable does not have ``n`` ``True`` elements""" - self.assertFalse(mi.exactly_n([True, False, False], 2)) - self.assertFalse(mi.exactly_n([True, True, False], 1)) - self.assertFalse(mi.exactly_n([False], 1)) - self.assertFalse(mi.exactly_n([True], -1)) - self.assertFalse(mi.exactly_n(repeat(True), 100)) - - def test_empty(self): - """Return ``True`` if the iterable is empty and ``n`` is 0""" - self.assertTrue(mi.exactly_n([], 0)) - self.assertFalse(mi.exactly_n([], 1)) - - -class AlwaysReversibleTests(TestCase): - """Tests for ``always_reversible()``""" - - def test_regular_reversed(self): - self.assertEqual(list(reversed(range(10))), - list(mi.always_reversible(range(10)))) - self.assertEqual(list(reversed([1, 2, 3])), - list(mi.always_reversible([1, 2, 3]))) - self.assertEqual(reversed([1, 2, 3]).__class__, - mi.always_reversible([1, 2, 3]).__class__) - - def test_nonseq_reversed(self): - # Create a non-reversible generator from a sequence - with self.assertRaises(TypeError): - reversed(x for x in range(10)) - - self.assertEqual(list(reversed(range(10))), - list(mi.always_reversible(x for x in range(10)))) - self.assertEqual(list(reversed([1, 2, 3])), - list(mi.always_reversible(x for x in [1, 2, 3]))) - self.assertNotEqual(reversed((1, 2)).__class__, - mi.always_reversible(x for x in (1, 2)).__class__) - - -class CircularShiftsTests(TestCase): - def test_empty(self): - # empty iterable -> empty list - self.assertEqual(list(mi.circular_shifts([])), []) - - def test_simple_circular_shifts(self): - # test the a simple iterator case - self.assertEqual( - mi.circular_shifts(range(4)), - [(0, 1, 2, 3), 
(1, 2, 3, 0), (2, 3, 0, 1), (3, 0, 1, 2)] - ) - - def test_duplicates(self): - # test non-distinct entries - self.assertEqual( - mi.circular_shifts([0, 1, 0, 1]), - [(0, 1, 0, 1), (1, 0, 1, 0), (0, 1, 0, 1), (1, 0, 1, 0)] - ) - - -class MakeDecoratorTests(TestCase): - def test_basic(self): - slicer = mi.make_decorator(islice) - - @slicer(1, 10, 2) - def user_function(arg_1, arg_2, kwarg_1=None): - self.assertEqual(arg_1, 'arg_1') - self.assertEqual(arg_2, 'arg_2') - self.assertEqual(kwarg_1, 'kwarg_1') - return map(str, count()) - - it = user_function('arg_1', 'arg_2', kwarg_1='kwarg_1') - actual = list(it) - expected = ['1', '3', '5', '7', '9'] - self.assertEqual(actual, expected) - - def test_result_index(self): - def stringify(*args, **kwargs): - self.assertEqual(args[0], 'arg_0') - iterable = args[1] - self.assertEqual(args[2], 'arg_2') - self.assertEqual(kwargs['kwarg_1'], 'kwarg_1') - return map(str, iterable) - - stringifier = mi.make_decorator(stringify, result_index=1) - - @stringifier('arg_0', 'arg_2', kwarg_1='kwarg_1') - def user_function(n): - return count(n) - - it = user_function(1) - actual = mi.take(5, it) - expected = ['1', '2', '3', '4', '5'] - self.assertEqual(actual, expected) - - def test_wrap_class(self): - seeker = mi.make_decorator(mi.seekable) - - @seeker() - def user_function(n): - return map(str, range(n)) - - it = user_function(5) - self.assertEqual(list(it), ['0', '1', '2', '3', '4']) - - it.seek(0) - self.assertEqual(list(it), ['0', '1', '2', '3', '4']) - - -class MapReduceTests(TestCase): - def test_default(self): - iterable = (str(x) for x in range(5)) - keyfunc = lambda x: int(x) // 2 - actual = sorted(mi.map_reduce(iterable, keyfunc).items()) - expected = [(0, ['0', '1']), (1, ['2', '3']), (2, ['4'])] - self.assertEqual(actual, expected) - - def test_valuefunc(self): - iterable = (str(x) for x in range(5)) - keyfunc = lambda x: int(x) // 2 - valuefunc = int - actual = sorted(mi.map_reduce(iterable, keyfunc, valuefunc).items()) - expected = [(0, [0, 1]), (1, [2, 3]), (2, [4])] - self.assertEqual(actual, expected) - - def test_reducefunc(self): - iterable = (str(x) for x in range(5)) - keyfunc = lambda x: int(x) // 2 - valuefunc = int - reducefunc = lambda value_list: reduce(mul, value_list, 1) - actual = sorted( - mi.map_reduce(iterable, keyfunc, valuefunc, reducefunc).items() - ) - expected = [(0, 0), (1, 6), (2, 4)] - self.assertEqual(actual, expected) - - def test_ret(self): - d = mi.map_reduce([1, 0, 2, 0, 1, 0], bool) - self.assertEqual(d, {False: [0, 0, 0], True: [1, 2, 1]}) - self.assertRaises(KeyError, lambda: d[None].append(1)) - - -class RlocateTests(TestCase): - def test_default_pred(self): - iterable = [0, 1, 1, 0, 1, 0, 0] - for it in (iterable[:], iter(iterable)): - actual = list(mi.rlocate(it)) - expected = [4, 2, 1] - self.assertEqual(actual, expected) - - def test_no_matches(self): - iterable = [0, 0, 0] - for it in (iterable[:], iter(iterable)): - actual = list(mi.rlocate(it)) - expected = [] - self.assertEqual(actual, expected) - - def test_custom_pred(self): - iterable = ['0', 1, 1, '0', 1, '0', '0'] - pred = lambda x: x == '0' - for it in (iterable[:], iter(iterable)): - actual = list(mi.rlocate(it, pred)) - expected = [6, 5, 3, 0] - self.assertEqual(actual, expected) - - def test_efficient_reversal(self): - iterable = range(9 ** 9) # Is efficiently reversible - target = 9 ** 9 - 2 - pred = lambda x: x == target # Find-able from the right - actual = next(mi.rlocate(iterable, pred)) - self.assertEqual(actual, target) - - def 
test_window_size(self): - iterable = ['0', 1, 1, '0', 1, '0', '0'] - pred = lambda *args: args == ('0', 1) - for it in (iterable, iter(iterable)): - actual = list(mi.rlocate(it, pred, window_size=2)) - expected = [3, 0] - self.assertEqual(actual, expected) - - def test_window_size_large(self): - iterable = [1, 2, 3, 4] - pred = lambda a, b, c, d, e: True - for it in (iterable, iter(iterable)): - actual = list(mi.rlocate(iterable, pred, window_size=5)) - expected = [0] - self.assertEqual(actual, expected) - - def test_window_size_zero(self): - iterable = [1, 2, 3, 4] - pred = lambda: True - for it in (iterable, iter(iterable)): - with self.assertRaises(ValueError): - list(mi.locate(iterable, pred, window_size=0)) - - -class ReplaceTests(TestCase): - def test_basic(self): - iterable = range(10) - pred = lambda x: x % 2 == 0 - substitutes = [] - actual = list(mi.replace(iterable, pred, substitutes)) - expected = [1, 3, 5, 7, 9] - self.assertEqual(actual, expected) - - def test_count(self): - iterable = range(10) - pred = lambda x: x % 2 == 0 - substitutes = [] - actual = list(mi.replace(iterable, pred, substitutes, count=4)) - expected = [1, 3, 5, 7, 8, 9] - self.assertEqual(actual, expected) - - def test_window_size(self): - iterable = range(10) - pred = lambda *args: args == (0, 1, 2) - substitutes = [] - actual = list(mi.replace(iterable, pred, substitutes, window_size=3)) - expected = [3, 4, 5, 6, 7, 8, 9] - self.assertEqual(actual, expected) - - def test_window_size_end(self): - iterable = range(10) - pred = lambda *args: args == (7, 8, 9) - substitutes = [] - actual = list(mi.replace(iterable, pred, substitutes, window_size=3)) - expected = [0, 1, 2, 3, 4, 5, 6] - self.assertEqual(actual, expected) - - def test_window_size_count(self): - iterable = range(10) - pred = lambda *args: (args == (0, 1, 2)) or (args == (7, 8, 9)) - substitutes = [] - actual = list( - mi.replace(iterable, pred, substitutes, count=1, window_size=3) - ) - expected = [3, 4, 5, 6, 7, 8, 9] - self.assertEqual(actual, expected) - - def test_window_size_large(self): - iterable = range(4) - pred = lambda a, b, c, d, e: True - substitutes = [5, 6, 7] - actual = list(mi.replace(iterable, pred, substitutes, window_size=5)) - expected = [5, 6, 7] - self.assertEqual(actual, expected) - - def test_window_size_zero(self): - iterable = range(10) - pred = lambda *args: True - substitutes = [] - with self.assertRaises(ValueError): - list(mi.replace(iterable, pred, substitutes, window_size=0)) - - def test_iterable_substitutes(self): - iterable = range(5) - pred = lambda x: x % 2 == 0 - substitutes = iter('__') - actual = list(mi.replace(iterable, pred, substitutes)) - expected = ['_', '_', 1, '_', '_', 3, '_', '_'] - self.assertEqual(actual, expected) - - -class PartitionsTest(TestCase): - def test_types(self): - for iterable in [ - 'abcd', - ['a', 'b', 'c', 'd'], - ('a', 'b', 'c', 'd'), - ]: - with self.subTest(iterable=iterable): - actual = list(mi.partitions(iterable)) - expected = [ - [['a', 'b', 'c', 'd']], - [['a'], ['b', 'c', 'd']], - [['a', 'b'], ['c', 'd']], - [['a', 'b', 'c'], ['d']], - [['a'], ['b'], ['c', 'd']], - [['a'], ['b', 'c'], ['d']], - [['a', 'b'], ['c'], ['d']], - [['a'], ['b'], ['c'], ['d']] - ] - self.assertEqual(actual, expected) - - def test_empty(self): - iterable = [] - actual = list(mi.partitions(iterable)) - expected = [[[]]] - self.assertEqual(actual, expected) - - def test_order(self): - iterable = iter([3, 2, 1]) - actual = list(mi.partitions(iterable)) - expected = [ - [[3, 2, 1]], - [[3], [2, 
1]], - [[3, 2], [1]], - [[3], [2], [1]], - ] - self.assertEqual(actual, expected) - - def test_duplicates(self): - iterable = [1, 1, 1] - actual = list(mi.partitions(iterable)) - expected = [ - [[1, 1, 1]], - [[1], [1, 1]], - [[1, 1], [1]], - [[1], [1], [1]], - ] - self.assertEqual(actual, expected) - - -class SetPartitionsTests(TestCase): - def test_repeated(self): - it = 'aaa' - actual = [] - for part in mi.set_partitions(it, 2): - actual.append([''.join(p) for p in part]) - expected = [['a', 'aa'], ['a', 'aa'], ['a', 'aa']] - self.assertEqual(actual, expected) - - def test_each_correct(self): - a = frozenset(range(6)) - for soln in mi.set_partitions(a): - soln = frozenset(frozenset(part) for part in soln) - total = reduce(or_, soln) - self.assertEqual(a, total) - - def test_duplicates(self): - a = list(range(6)) # unique values so set comparision will work - solns = list(mi.set_partitions(a)) - unique_solns = frozenset( - frozenset( - frozenset(part) for part in soln) - for soln in solns) - self.assertEqual(len(solns), len(unique_solns)) - - def test_lexical_order(self): - def less(solnA, solnB): - """lexically orders solutions""" - if len(solnA) == len(solnB): - for partA, partB in zip(solnA, solnB): - if len(partA) == len(partB): - if partA == partB: - continue - else: - return partA < partB - else: - return len(partA) < len(partB) - else: - return len(solnA) < len(solnB) - for curr, nxt in mi.windowed(mi.set_partitions(range(6)), 2): - self.assertTrue(less(curr, nxt)) - - def test_found_all(self): - """small example, hand-checked""" - expected = [((0,), (1,), (2, 3, 4)), - ((0,), (2,), (1, 3, 4)), - ((0,), (3,), (1, 2, 4)), - ((0,), (4,), (1, 2, 3)), - ((0,), (1, 2), (3, 4)), - ((0,), (1, 3), (2, 4)), - ((0,), (1, 4), (2, 3)), - ((1,), (2,), (0, 3, 4)), - ((1,), (3,), (0, 2, 4)), - ((1,), (4,), (0, 2, 3)), - ((1,), (0, 2), (3, 4)), - ((1,), (0, 3), (2, 4)), - ((1,), (0, 4), (2, 3)), - ((2,), (3,), (0, 1, 4)), - ((2,), (4,), (0, 1, 3)), - ((2,), (0, 1), (3, 4)), - ((2,), (0, 3), (1, 4)), - ((2,), (0, 4), (1, 3)), - ((3,), (4,), (0, 1, 2)), - ((3,), (0, 1), (2, 4)), - ((3,), (0, 2), (1, 4)), - ((3,), (0, 4), (1, 2)), - ((4,), (0, 1), (2, 3)), - ((4,), (0, 2), (1, 3)), - ((4,), (0, 3), (1, 2))] - actual = list(mi.set_partitions(range(5), 3)) - for e, a in zip(expected, actual): - self.assertEqual(e, a) - - -class TimeLimitedTests(TestCase): - def test_basic(self): - def generator(): - yield 1 - yield 2 - sleep(0.2) - yield 3 - - iterable = generator() - actual = list(mi.time_limited(0.1, iterable)) - expected = [1, 2] - self.assertEqual(actual, expected) - - def test_zero_limit(self): - iterable = count() - actual = list(mi.time_limited(0, iterable)) - expected = [] - self.assertEqual(actual, expected) - - def test_invalid_limit(self): - with self.assertRaises(ValueError): - list(mi.time_limited(-0.1, count())) - - -class OnlyTests(TestCase): - def test_defaults(self): - self.assertEqual(mi.only([]), None) - self.assertEqual(mi.only([1]), 1) - self.assertRaises(ValueError, lambda: mi.only([1, 2])) - - def test_custom_value(self): - self.assertEqual(mi.only([], default='!'), '!') - self.assertEqual(mi.only([1], default='!'), 1) - self.assertRaises(ValueError, lambda: mi.only([1, 2], default='!')) - - def test_custom_exception(self): - self.assertEqual(mi.only([], too_long=RuntimeError), None) - self.assertEqual(mi.only([1], too_long=RuntimeError), 1) - self.assertRaises( - RuntimeError, lambda: mi.only([1, 2], too_long=RuntimeError) - ) - - -class IchunkedTests(TestCase): - def 
test_even(self): - iterable = (str(x) for x in range(10)) - actual = [''.join(c) for c in mi.ichunked(iterable, 5)] - expected = ['01234', '56789'] - self.assertEqual(actual, expected) - - def test_odd(self): - iterable = (str(x) for x in range(10)) - actual = [''.join(c) for c in mi.ichunked(iterable, 4)] - expected = ['0123', '4567', '89'] - self.assertEqual(actual, expected) - - def test_zero(self): - iterable = [] - actual = [list(c) for c in mi.ichunked(iterable, 0)] - expected = [] - self.assertEqual(actual, expected) - - def test_negative(self): - iterable = count() - with self.assertRaises(ValueError): - [list(c) for c in mi.ichunked(iterable, -1)] - - def test_out_of_order(self): - iterable = map(str, count()) - it = mi.ichunked(iterable, 4) - chunk_1 = next(it) - chunk_2 = next(it) - self.assertEqual(''.join(chunk_2), '4567') - self.assertEqual(''.join(chunk_1), '0123') - - def test_laziness(self): - def gen(): - yield 0 - raise RuntimeError - yield from count(1) - - it = mi.ichunked(gen(), 4) - chunk = next(it) - self.assertEqual(next(chunk), 0) - self.assertRaises(RuntimeError, next, it) - - -class DistinctCombinationsTests(TestCase): - def test_basic(self): - iterable = (1, 2, 2, 3, 3, 3) - for r in range(len(iterable)): - with self.subTest(r=r): - actual = sorted(mi.distinct_combinations(iterable, r)) - expected = sorted(set(combinations(iterable, r))) - self.assertEqual(actual, expected) - - def test_distinct(self): - iterable = list(range(6)) - for r in range(len(iterable)): - with self.subTest(r=r): - actual = list(mi.distinct_combinations(iterable, r)) - expected = list(combinations(iterable, r)) - self.assertEqual(actual, expected) - - def test_negative(self): - with self.assertRaises(ValueError): - list(mi.distinct_combinations([], -1)) - - def test_empty(self): - self.assertEqual(list(mi.distinct_combinations([], 2)), []) - - -class FilterExceptTests(TestCase): - def test_no_exceptions_pass(self): - iterable = '0123' - actual = list(mi.filter_except(int, iterable)) - expected = ['0', '1', '2', '3'] - self.assertEqual(actual, expected) - - def test_no_exceptions_raise(self): - iterable = ['0', '1', 'two', '3'] - with self.assertRaises(ValueError): - list(mi.filter_except(int, iterable)) - - def test_raise(self): - iterable = ['0', '1' '2', 'three', None] - with self.assertRaises(TypeError): - list(mi.filter_except(int, iterable, ValueError)) - - def test_false(self): - # Even if the validator returns false, we pass through - validator = lambda x: False - iterable = ['0', '1', '2', 'three', None] - actual = list(mi.filter_except(validator, iterable, Exception)) - expected = ['0', '1', '2', 'three', None] - self.assertEqual(actual, expected) - - def test_multiple(self): - iterable = ['0', '1', '2', 'three', None, '4'] - actual = list(mi.filter_except(int, iterable, ValueError, TypeError)) - expected = ['0', '1', '2', '4'] - self.assertEqual(actual, expected) - - -class MapExceptTests(TestCase): - def test_no_exceptions_pass(self): - iterable = '0123' - actual = list(mi.map_except(int, iterable)) - expected = [0, 1, 2, 3] - self.assertEqual(actual, expected) - - def test_no_exceptions_raise(self): - iterable = ['0', '1', 'two', '3'] - with self.assertRaises(ValueError): - list(mi.map_except(int, iterable)) - - def test_raise(self): - iterable = ['0', '1' '2', 'three', None] - with self.assertRaises(TypeError): - list(mi.map_except(int, iterable, ValueError)) - - def test_multiple(self): - iterable = ['0', '1', '2', 'three', None, '4'] - actual = list(mi.map_except(int, 
iterable, ValueError, TypeError)) - expected = [0, 1, 2, 4] - self.assertEqual(actual, expected) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools/tests/test_recipes.py b/.tox/py37-normal/lib/python3.7/site-packages/more_itertools/tests/test_recipes.py deleted file mode 100644 index 99db833..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/more_itertools/tests/test_recipes.py +++ /dev/null @@ -1,605 +0,0 @@ -import warnings -from doctest import DocTestSuite -from unittest import TestCase - -from itertools import combinations - -import more_itertools as mi - - -def load_tests(loader, tests, ignore): - # Add the doctests - tests.addTests(DocTestSuite('more_itertools.recipes')) - return tests - - -class TakeTests(TestCase): - """Tests for ``take()``""" - - def test_simple_take(self): - """Test basic usage""" - t = mi.take(5, range(10)) - self.assertEqual(t, [0, 1, 2, 3, 4]) - - def test_null_take(self): - """Check the null case""" - t = mi.take(0, range(10)) - self.assertEqual(t, []) - - def test_negative_take(self): - """Make sure taking negative items results in a ValueError""" - self.assertRaises(ValueError, lambda: mi.take(-3, range(10))) - - def test_take_too_much(self): - """Taking more than an iterator has remaining should return what the - iterator has remaining. - - """ - t = mi.take(10, range(5)) - self.assertEqual(t, [0, 1, 2, 3, 4]) - - -class TabulateTests(TestCase): - """Tests for ``tabulate()``""" - - def test_simple_tabulate(self): - """Test the happy path""" - t = mi.tabulate(lambda x: x) - f = tuple([next(t) for _ in range(3)]) - self.assertEqual(f, (0, 1, 2)) - - def test_count(self): - """Ensure tabulate accepts specific count""" - t = mi.tabulate(lambda x: 2 * x, -1) - f = (next(t), next(t), next(t)) - self.assertEqual(f, (-2, 0, 2)) - - -class TailTests(TestCase): - """Tests for ``tail()``""" - - def test_greater(self): - """Length of iterable is greater than requested tail""" - self.assertEqual(list(mi.tail(3, 'ABCDEFG')), ['E', 'F', 'G']) - - def test_equal(self): - """Length of iterable is equal to the requested tail""" - self.assertEqual( - list(mi.tail(7, 'ABCDEFG')), ['A', 'B', 'C', 'D', 'E', 'F', 'G'] - ) - - def test_less(self): - """Length of iterable is less than requested tail""" - self.assertEqual( - list(mi.tail(8, 'ABCDEFG')), ['A', 'B', 'C', 'D', 'E', 'F', 'G'] - ) - - -class ConsumeTests(TestCase): - """Tests for ``consume()``""" - - def test_sanity(self): - """Test basic functionality""" - r = (x for x in range(10)) - mi.consume(r, 3) - self.assertEqual(3, next(r)) - - def test_null_consume(self): - """Check the null case""" - r = (x for x in range(10)) - mi.consume(r, 0) - self.assertEqual(0, next(r)) - - def test_negative_consume(self): - """Check that negative consumsion throws an error""" - r = (x for x in range(10)) - self.assertRaises(ValueError, lambda: mi.consume(r, -1)) - - def test_total_consume(self): - """Check that iterator is totally consumed by default""" - r = (x for x in range(10)) - mi.consume(r) - self.assertRaises(StopIteration, lambda: next(r)) - - -class NthTests(TestCase): - """Tests for ``nth()``""" - - def test_basic(self): - """Make sure the nth item is returned""" - l = range(10) - for i, v in enumerate(l): - self.assertEqual(mi.nth(l, i), v) - - def test_default(self): - """Ensure a default value is returned when nth item not found""" - l = range(3) - self.assertEqual(mi.nth(l, 100, "zebra"), "zebra") - - def test_negative_item_raises(self): - """Ensure asking for a negative item raises an 
exception""" - self.assertRaises(ValueError, lambda: mi.nth(range(10), -3)) - - -class AllEqualTests(TestCase): - """Tests for ``all_equal()``""" - - def test_true(self): - """Everything is equal""" - self.assertTrue(mi.all_equal('aaaaaa')) - self.assertTrue(mi.all_equal([0, 0, 0, 0])) - - def test_false(self): - """Not everything is equal""" - self.assertFalse(mi.all_equal('aaaaab')) - self.assertFalse(mi.all_equal([0, 0, 0, 1])) - - def test_tricky(self): - """Not everything is identical, but everything is equal""" - items = [1, complex(1, 0), 1.0] - self.assertTrue(mi.all_equal(items)) - - def test_empty(self): - """Return True if the iterable is empty""" - self.assertTrue(mi.all_equal('')) - self.assertTrue(mi.all_equal([])) - - def test_one(self): - """Return True if the iterable is singular""" - self.assertTrue(mi.all_equal('0')) - self.assertTrue(mi.all_equal([0])) - - -class QuantifyTests(TestCase): - """Tests for ``quantify()``""" - - def test_happy_path(self): - """Make sure True count is returned""" - q = [True, False, True] - self.assertEqual(mi.quantify(q), 2) - - def test_custom_predicate(self): - """Ensure non-default predicates return as expected""" - q = range(10) - self.assertEqual(mi.quantify(q, lambda x: x % 2 == 0), 5) - - -class PadnoneTests(TestCase): - """Tests for ``padnone()``""" - - def test_happy_path(self): - """wrapper iterator should return None indefinitely""" - r = range(2) - p = mi.padnone(r) - self.assertEqual([0, 1, None, None], [next(p) for _ in range(4)]) - - -class NcyclesTests(TestCase): - """Tests for ``ncycles()``""" - - def test_happy_path(self): - """cycle a sequence three times""" - r = ["a", "b", "c"] - n = mi.ncycles(r, 3) - self.assertEqual( - ["a", "b", "c", "a", "b", "c", "a", "b", "c"], - list(n) - ) - - def test_null_case(self): - """asking for 0 cycles should return an empty iterator""" - n = mi.ncycles(range(100), 0) - self.assertRaises(StopIteration, lambda: next(n)) - - def test_pathological_case(self): - """asking for negative cycles should return an empty iterator""" - n = mi.ncycles(range(100), -10) - self.assertRaises(StopIteration, lambda: next(n)) - - -class DotproductTests(TestCase): - """Tests for ``dotproduct()``""" - - def test_happy_path(self): - """simple dotproduct example""" - self.assertEqual(400, mi.dotproduct([10, 10], [20, 20])) - - -class FlattenTests(TestCase): - """Tests for ``flatten()``""" - - def test_basic_usage(self): - """ensure list of lists is flattened one level""" - f = [[0, 1, 2], [3, 4, 5]] - self.assertEqual(list(range(6)), list(mi.flatten(f))) - - def test_single_level(self): - """ensure list of lists is flattened only one level""" - f = [[0, [1, 2]], [[3, 4], 5]] - self.assertEqual([0, [1, 2], [3, 4], 5], list(mi.flatten(f))) - - -class RepeatfuncTests(TestCase): - """Tests for ``repeatfunc()``""" - - def test_simple_repeat(self): - """test simple repeated functions""" - r = mi.repeatfunc(lambda: 5) - self.assertEqual([5, 5, 5, 5, 5], [next(r) for _ in range(5)]) - - def test_finite_repeat(self): - """ensure limited repeat when times is provided""" - r = mi.repeatfunc(lambda: 5, times=5) - self.assertEqual([5, 5, 5, 5, 5], list(r)) - - def test_added_arguments(self): - """ensure arguments are applied to the function""" - r = mi.repeatfunc(lambda x: x, 2, 3) - self.assertEqual([3, 3], list(r)) - - def test_null_times(self): - """repeat 0 should return an empty iterator""" - r = mi.repeatfunc(range, 0, 3) - self.assertRaises(StopIteration, lambda: next(r)) - - -class PairwiseTests(TestCase): -
"""Tests for ``pairwise()``""" - - def test_base_case(self): - """ensure an iterable will return pairwise""" - p = mi.pairwise([1, 2, 3]) - self.assertEqual([(1, 2), (2, 3)], list(p)) - - def test_short_case(self): - """ensure an empty iterator if there's not enough values to pair""" - p = mi.pairwise("a") - self.assertRaises(StopIteration, lambda: next(p)) - - -class GrouperTests(TestCase): - """Tests for ``grouper()``""" - - def test_even(self): - """Test when group size divides evenly into the length of - the iterable. - - """ - self.assertEqual( - list(mi.grouper('ABCDEF', 3)), [('A', 'B', 'C'), ('D', 'E', 'F')] - ) - - def test_odd(self): - """Test when group size does not divide evenly into the length of the - iterable. - - """ - self.assertEqual( - list(mi.grouper('ABCDE', 3)), [('A', 'B', 'C'), ('D', 'E', None)] - ) - - def test_fill_value(self): - """Test that the fill value is used to pad the final group""" - self.assertEqual( - list(mi.grouper('ABCDE', 3, 'x')), - [('A', 'B', 'C'), ('D', 'E', 'x')] - ) - - def test_legacy_order(self): - """Historically, grouper expected the n as the first parameter""" - with warnings.catch_warnings(record=True) as caught: - warnings.simplefilter('always') - self.assertEqual( - list(mi.grouper(3, 'ABCDEF')), - [('A', 'B', 'C'), ('D', 'E', 'F')], - ) - - warning, = caught - assert warning.category == DeprecationWarning - - -class RoundrobinTests(TestCase): - """Tests for ``roundrobin()``""" - - def test_even_groups(self): - """Ensure ordered output from evenly populated iterables""" - self.assertEqual( - list(mi.roundrobin('ABC', [1, 2, 3], range(3))), - ['A', 1, 0, 'B', 2, 1, 'C', 3, 2] - ) - - def test_uneven_groups(self): - """Ensure ordered output from unevenly populated iterables""" - self.assertEqual( - list(mi.roundrobin('ABCD', [1, 2], range(0))), - ['A', 1, 'B', 2, 'C', 'D'] - ) - - -class PartitionTests(TestCase): - """Tests for ``partition()``""" - - def test_bool(self): - """Test when pred() returns a boolean""" - lesser, greater = mi.partition(lambda x: x > 5, range(10)) - self.assertEqual(list(lesser), [0, 1, 2, 3, 4, 5]) - self.assertEqual(list(greater), [6, 7, 8, 9]) - - def test_arbitrary(self): - """Test when pred() returns an integer""" - divisibles, remainders = mi.partition(lambda x: x % 3, range(10)) - self.assertEqual(list(divisibles), [0, 3, 6, 9]) - self.assertEqual(list(remainders), [1, 2, 4, 5, 7, 8]) - - -class PowersetTests(TestCase): - """Tests for ``powerset()``""" - - def test_combinatorics(self): - """Ensure a proper enumeration""" - p = mi.powerset([1, 2, 3]) - self.assertEqual( - list(p), - [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)] - ) - - -class UniqueEverseenTests(TestCase): - """Tests for ``unique_everseen()``""" - - def test_everseen(self): - """ensure duplicate elements are ignored""" - u = mi.unique_everseen('AAAABBBBCCDAABBB') - self.assertEqual( - ['A', 'B', 'C', 'D'], - list(u) - ) - - def test_custom_key(self): - """ensure the custom key comparison works""" - u = mi.unique_everseen('aAbACCc', key=str.lower) - self.assertEqual(list('abC'), list(u)) - - def test_unhashable(self): - """ensure things work for unhashable items""" - iterable = ['a', [1, 2, 3], [1, 2, 3], 'a'] - u = mi.unique_everseen(iterable) - self.assertEqual(list(u), ['a', [1, 2, 3]]) - - def test_unhashable_key(self): - """ensure things work for unhashable items with a custom key""" - iterable = ['a', [1, 2, 3], [1, 2, 3], 'a'] - u = mi.unique_everseen(iterable, key=lambda x: x) - self.assertEqual(list(u), ['a', [1, 2, 
3]]) - - -class UniqueJustseenTests(TestCase): - """Tests for ``unique_justseen()``""" - - def test_justseen(self): - """ensure only last item is remembered""" - u = mi.unique_justseen('AAAABBBCCDABB') - self.assertEqual(list('ABCDAB'), list(u)) - - def test_custom_key(self): - """ensure the custom key comparison works""" - u = mi.unique_justseen('AABCcAD', str.lower) - self.assertEqual(list('ABCAD'), list(u)) - - -class IterExceptTests(TestCase): - """Tests for ``iter_except()``""" - - def test_exact_exception(self): - """ensure the exact specified exception is caught""" - l = [1, 2, 3] - i = mi.iter_except(l.pop, IndexError) - self.assertEqual(list(i), [3, 2, 1]) - - def test_generic_exception(self): - """ensure the generic exception can be caught""" - l = [1, 2] - i = mi.iter_except(l.pop, Exception) - self.assertEqual(list(i), [2, 1]) - - def test_uncaught_exception_is_raised(self): - """ensure a non-specified exception is raised""" - l = [1, 2, 3] - i = mi.iter_except(l.pop, KeyError) - self.assertRaises(IndexError, lambda: list(i)) - - def test_first(self): - """ensure first is run before the function""" - l = [1, 2, 3] - f = lambda: 25 - i = mi.iter_except(l.pop, IndexError, f) - self.assertEqual(list(i), [25, 3, 2, 1]) - - -class FirstTrueTests(TestCase): - """Tests for ``first_true()``""" - - def test_something_true(self): - """Test with no keywords""" - self.assertEqual(mi.first_true(range(10)), 1) - - def test_nothing_true(self): - """Test default return value.""" - self.assertIsNone(mi.first_true([0, 0, 0])) - - def test_default(self): - """Test with a default keyword""" - self.assertEqual(mi.first_true([0, 0, 0], default='!'), '!') - - def test_pred(self): - """Test with a custom predicate""" - self.assertEqual( - mi.first_true([2, 4, 6], pred=lambda x: x % 3 == 0), 6 - ) - - -class RandomProductTests(TestCase): - """Tests for ``random_product()`` - - Since random.choice() has different results with the same seed across - python versions 2.x and 3.x, these tests use highly probably events to - create predictable outcomes across platforms. - """ - - def test_simple_lists(self): - """Ensure that one item is chosen from each list in each pair. - Also ensure that each item from each list eventually appears in - the chosen combinations. - - Odds are roughly 1 in 7.1 * 10e16 that one item from either list will - not be chosen after 100 samplings of one item from each list. Just to - be safe, better use a known random seed, too. - - """ - nums = [1, 2, 3] - lets = ['a', 'b', 'c'] - n, m = zip(*[mi.random_product(nums, lets) for _ in range(100)]) - n, m = set(n), set(m) - self.assertEqual(n, set(nums)) - self.assertEqual(m, set(lets)) - self.assertEqual(len(n), len(nums)) - self.assertEqual(len(m), len(lets)) - - def test_list_with_repeat(self): - """ensure multiple items are chosen, and that they appear to be chosen - from one list then the next, in proper order. - - """ - nums = [1, 2, 3] - lets = ['a', 'b', 'c'] - r = list(mi.random_product(nums, lets, repeat=100)) - self.assertEqual(2 * 100, len(r)) - n, m = set(r[::2]), set(r[1::2]) - self.assertEqual(n, set(nums)) - self.assertEqual(m, set(lets)) - self.assertEqual(len(n), len(nums)) - self.assertEqual(len(m), len(lets)) - - -class RandomPermutationTests(TestCase): - """Tests for ``random_permutation()``""" - - def test_full_permutation(self): - """ensure every item from the iterable is returned in a new ordering - - 15 elements have a 1 in 1.3 * 10e12 of appearing in sorted order, so - we fix a seed value just to be sure. 
- - """ - i = range(15) - r = mi.random_permutation(i) - self.assertEqual(set(i), set(r)) - if i == r: - raise AssertionError("Values were not permuted") - - def test_partial_permutation(self): - """ensure all returned items are from the iterable, that the returned - permutation is of the desired length, and that all items eventually - get returned. - - Sampling 100 permutations of length 5 from a set of 15 leaves a - (2/3)^100 chance that an item will not be chosen. Multiplied by 15 - items, there is a 1 in 2.6e16 chance that at least 1 item will not - show up in the resulting output. Using a random seed will fix that. - - """ - items = range(15) - item_set = set(items) - all_items = set() - for _ in range(100): - permutation = mi.random_permutation(items, 5) - self.assertEqual(len(permutation), 5) - permutation_set = set(permutation) - self.assertLessEqual(permutation_set, item_set) - all_items |= permutation_set - self.assertEqual(all_items, item_set) - - -class RandomCombinationTests(TestCase): - """Tests for ``random_combination()``""" - - def test_pseudorandomness(self): - """ensure different subsets of the iterable get returned over many - samplings of random combinations""" - items = range(15) - all_items = set() - for _ in range(50): - combination = mi.random_combination(items, 5) - all_items |= set(combination) - self.assertEqual(all_items, set(items)) - - def test_no_replacement(self): - """ensure that elements are sampled without replacement""" - items = range(15) - for _ in range(50): - combination = mi.random_combination(items, len(items)) - self.assertEqual(len(combination), len(set(combination))) - self.assertRaises( - ValueError, lambda: mi.random_combination(items, len(items) + 1) - ) - - -class RandomCombinationWithReplacementTests(TestCase): - """Tests for ``random_combination_with_replacement()``""" - - def test_replacement(self): - """ensure that elements are sampled with replacement""" - items = range(5) - combo = mi.random_combination_with_replacement(items, len(items) * 2) - self.assertEqual(2 * len(items), len(combo)) - if len(set(combo)) == len(combo): - raise AssertionError("Combination contained no duplicates") - - def test_pseudorandomness(self): - """ensure different subsets of the iterable get returned over many - samplings of random combinations""" - items = range(15) - all_items = set() - for _ in range(50): - combination = mi.random_combination_with_replacement(items, 5) - all_items |= set(combination) - self.assertEqual(all_items, set(items)) - - -class NthCombinationTests(TestCase): - def test_basic(self): - iterable = 'abcdefg' - r = 4 - for index, expected in enumerate(combinations(iterable, r)): - actual = mi.nth_combination(iterable, r, index) - self.assertEqual(actual, expected) - - def test_long(self): - actual = mi.nth_combination(range(180), 4, 2000000) - expected = (2, 12, 35, 126) - self.assertEqual(actual, expected) - - def test_invalid_r(self): - for r in (-1, 3): - with self.assertRaises(ValueError): - mi.nth_combination([], r, 0) - - def test_invalid_index(self): - with self.assertRaises(IndexError): - mi.nth_combination('abcdefg', 3, -36) - - -class PrependTests(TestCase): - def test_basic(self): - value = 'a' - iterator = iter('bcdefg') - actual = list(mi.prepend(value, iterator)) - expected = list('abcdefg') - self.assertEqual(actual, expected) - - def test_multiple(self): - value = 'ab' - iterator = iter('cdefg') - actual = tuple(mi.prepend(value, iterator)) - expected = ('ab',) + tuple('cdefg') - self.assertEqual(actual, expected) 
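For context on the test_recipes.py deletion above: that file pinned down the behaviour of the more_itertools recipe helpers (take, grouper, unique_everseen, and friends). The snippet below is a minimal sketch of that behaviour, mirroring assertions from the deleted tests; it assumes more_itertools is installed and is illustrative only, not part of the patch:

    import more_itertools as mi

    # take() returns at most n items from the front of an iterable
    assert mi.take(5, range(10)) == [0, 1, 2, 3, 4]
    assert mi.take(10, range(5)) == [0, 1, 2, 3, 4]  # a short iterable is returned as-is

    # grouper() chunks into fixed-size groups, padding the last group with the fillvalue
    assert list(mi.grouper('ABCDE', 3, 'x')) == [('A', 'B', 'C'), ('D', 'E', 'x')]

    # unique_everseen() drops duplicates while preserving first-seen order
    assert list(mi.unique_everseen('AAAABBBBCCDAABBB')) == ['A', 'B', 'C', 'D']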
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/INSTALLER b/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/LICENSE b/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/LICENSE deleted file mode 100644 index 6f62d44..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/LICENSE +++ /dev/null @@ -1,3 +0,0 @@ -This software is made available under the terms of *either* of the licenses -found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made -under the terms of *both* these licenses. diff --git a/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/LICENSE.APACHE b/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/LICENSE.APACHE deleted file mode 100644 index 4947287..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/LICENSE.APACHE +++ /dev/null @@ -1,177 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/LICENSE.BSD b/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/LICENSE.BSD deleted file mode 100644 index 42ce7b7..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/LICENSE.BSD +++ /dev/null @@ -1,23 +0,0 @@ -Copyright (c) Donald Stufft and individual contributors. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/METADATA b/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/METADATA deleted file mode 100644 index 2283dd4..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/METADATA +++ /dev/null @@ -1,326 +0,0 @@ -Metadata-Version: 2.1 -Name: packaging -Version: 19.2 -Summary: Core utilities for Python packages -Home-page: https://github.com/pypa/packaging -Author: Donald Stufft and individual contributors -Author-email: donald@stufft.io -License: BSD or Apache License, Version 2.0 -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: License :: OSI Approved :: BSD License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* -Description-Content-Type: text/x-rst -Requires-Dist: pyparsing (>=2.0.2) -Requires-Dist: six - -packaging -========= - -Core utilities for Python packages. - -The ``packaging`` project includes the following: version handling, specifiers, -markers, requirements, tags, utilities. - -Documentation -------------- - -The `documentation`_ provides information and the API for the following: - -- Version Handling -- Specifiers -- Markers -- Requirements -- Tags -- Utilities - -Installation ------------- - -Use ``pip`` to install these utilities:: - - pip install packaging - -Discussion ----------- - -If you run into bugs, you can file them in our `issue tracker`_. - -You can also join ``#pypa`` on Freenode to ask questions or get involved. - - -.. _`documentation`: https://packaging.pypa.io/ -.. _`issue tracker`: https://github.com/pypa/packaging/issues - - -Code of Conduct ---------------- - -Everyone interacting in the packaging project's codebases, issue trackers, chat -rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_. - -.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ - -Contributing ------------- - -The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as -well as how to report a potential security issue. The documentation for this -project also covers information about `project development`_ and `security`_. - -.. _`project development`: https://packaging.pypa.io/en/latest/development/ -.. _`security`: https://packaging.pypa.io/en/latest/security/ - -Project History ---------------- - -Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for -recent changes and project history. - -.. 
_`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/ - -Changelog ---------- - -19.2 - 2019-09-18 -~~~~~~~~~~~~~~~~~ - -* Remove dependency on ``attrs`` (`#178 `__, `#179 `__) - -* Use appropriate fallbacks for CPython ABI tag (`#181 `__, `#185 `__) - -* Add manylinux2014 support (`#186 `__) - -* Improve ABI detection (`#181 `__) - -* Properly handle debug wheels for Python 3.8 (`#172 `__) - -* Improve detection of debug builds on Windows (`#194 `__) - -19.1 - 2019-07-30 -~~~~~~~~~~~~~~~~~ - -* Add the ``packaging.tags`` module. (`#156 `__) - -* Correctly handle two-digit versions in ``python_version`` (`#119 `__) - - -19.0 - 2019-01-20 -~~~~~~~~~~~~~~~~~ - -* Fix string representation of PEP 508 direct URL requirements with markers. - -* Better handling of file URLs - - This allows for using ``file:///absolute/path``, which was previously - prevented due to the missing ``netloc``. - - This allows for all file URLs that ``urlunparse`` turns back into the - original URL to be valid. - - -18.0 - 2018-09-26 -~~~~~~~~~~~~~~~~~ - -* Improve error messages when invalid requirements are given. (`#129 `__) - - -17.1 - 2017-02-28 -~~~~~~~~~~~~~~~~~ - -* Fix ``utils.canonicalize_version`` when supplying non PEP 440 versions. - - -17.0 - 2017-02-28 -~~~~~~~~~~~~~~~~~ - -* Drop support for python 2.6, 3.2, and 3.3. - -* Define minimal pyparsing version to 2.0.2 (`#91 `__). - -* Add ``epoch``, ``release``, ``pre``, ``dev``, and ``post`` attributes to - ``Version`` and ``LegacyVersion`` (`#34 `__). - -* Add ``Version().is_devrelease`` and ``LegacyVersion().is_devrelease`` to - make it easy to determine if a release is a development release. - -* Add ``utils.canonicalize_version`` to canonicalize version strings or - ``Version`` instances (`#121 `__). - - -16.8 - 2016-10-29 -~~~~~~~~~~~~~~~~~ - -* Fix markers that utilize ``in`` so that they render correctly. - -* Fix an erroneous test on Python RC releases. - - -16.7 - 2016-04-23 -~~~~~~~~~~~~~~~~~ - -* Add support for the deprecated ``python_implementation`` marker which was - an undocumented setuptools marker in addition to the newer markers. - - -16.6 - 2016-03-29 -~~~~~~~~~~~~~~~~~ - -* Add support for the deprecated, PEP 345 environment markers in addition to - the newer markers. - - -16.5 - 2016-02-26 -~~~~~~~~~~~~~~~~~ - -* Fix a regression in parsing requirements with whitespaces between the comma - separators. - - -16.4 - 2016-02-22 -~~~~~~~~~~~~~~~~~ - -* Fix a regression in parsing requirements like ``foo (==4)``. - - -16.3 - 2016-02-21 -~~~~~~~~~~~~~~~~~ - -* Fix a bug where ``packaging.requirements:Requirement`` was overly strict when - matching legacy requirements. - - -16.2 - 2016-02-09 -~~~~~~~~~~~~~~~~~ - -* Add a function that implements the name canonicalization from PEP 503. - - -16.1 - 2016-02-07 -~~~~~~~~~~~~~~~~~ - -* Implement requirement specifiers from PEP 508. - - -16.0 - 2016-01-19 -~~~~~~~~~~~~~~~~~ - -* Relicense so that packaging is available under *either* the Apache License, - Version 2.0 or a 2 Clause BSD license. - -* Support installation of packaging when only distutils is available. - -* Fix ``==`` comparison when there is a prefix and a local version in play. - (`#41 `__). - -* Implement environment markers from PEP 508. - - -15.3 - 2015-08-01 -~~~~~~~~~~~~~~~~~ - -* Normalize post-release spellings for rev/r prefixes. `#35 `__ - - -15.2 - 2015-05-13 -~~~~~~~~~~~~~~~~~ - -* Fix an error where the arbitary specifier (``===``) was not correctly - allowing pre-releases when it was being used. 
- -* Expose the specifier and version parts through properties on the - ``Specifier`` classes. - -* Allow iterating over the ``SpecifierSet`` to get access to all of the - ``Specifier`` instances. - -* Allow testing if a version is contained within a specifier via the ``in`` - operator. - - -15.1 - 2015-04-13 -~~~~~~~~~~~~~~~~~ - -* Fix a logic error that was causing inconsistent answers about whether or not - a pre-release was contained within a ``SpecifierSet``. - - -15.0 - 2015-01-02 -~~~~~~~~~~~~~~~~~ - -* Add ``Version().is_postrelease`` and ``LegacyVersion().is_postrelease`` to - make it easy to determine if a release is a post release. - -* Add ``Version().base_version`` and ``LegacyVersion().base_version`` to make - it easy to get the public version without any pre or post release markers. - -* Support the update to PEP 440 which removed the implied ``!=V.*`` when using - either the ``>V`` or ``<V`` operator. - - -14.3 - 2014-11-19 -~~~~~~~~~~~~~~~~~ - -* **BACKWARDS INCOMPATIBLE** Refactor specifier support so that it can sanely - handle legacy specifiers as well as PEP 440 specifiers. - -* **BACKWARDS INCOMPATIBLE** Move the specifier support out of - ``packaging.version`` into ``packaging.specifiers``. - - -14.2 - 2014-09-10 -~~~~~~~~~~~~~~~~~ - -* Add prerelease support to ``Specifier``. -* Remove the ability to do ``item in Specifier()`` and replace it with - ``Specifier().contains(item)`` in order to allow flags that signal if a - prerelease should be accepted or not. -* Add a method ``Specifier().filter()`` which takes an iterable and returns - an iterable with items that do not match the specifier filtered out. - - -14.1 - 2014-09-08 -~~~~~~~~~~~~~~~~~ - -* Allow ``LegacyVersion`` and ``Version`` to be sorted together. -* Add ``packaging.version.parse()`` to enable easily parsing a version string - as either a ``Version`` or a ``LegacyVersion`` depending on its PEP 440 - validity. - - -14.0 - 2014-09-05 -~~~~~~~~~~~~~~~~~ - -* Initial release. - - -..
_`master`: https://github.com/pypa/packaging/ - - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/RECORD b/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/RECORD deleted file mode 100644 index a6b4edd..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/RECORD +++ /dev/null @@ -1,28 +0,0 @@ -packaging-19.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -packaging-19.2.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197 -packaging-19.2.dist-info/LICENSE.APACHE,sha256=DOwG4OVfvD3FzuT8qbYH9my49OTbzzs8ATWU3RVnMuk,10173 -packaging-19.2.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344 -packaging-19.2.dist-info/METADATA,sha256=hwikcP_MnOmBPJWbRDoPfzqCuV424X0U7QCaBCxn_gA,9224 -packaging-19.2.dist-info/RECORD,, -packaging-19.2.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110 -packaging-19.2.dist-info/top_level.txt,sha256=zFdHrhWnPslzsiP455HutQsqPB6v0KCtNUMtUtrefDw,10 -packaging/__about__.py,sha256=CpuMSyh1V7adw8QMjWKkY3LtdqRUkRX4MgJ6nF4stM0,744 -packaging/__init__.py,sha256=6enbp5XgRfjBjsI9-bn00HjHf5TH21PDMOKkJW8xw-w,562 -packaging/__pycache__/__about__.cpython-37.pyc,, -packaging/__pycache__/__init__.cpython-37.pyc,, -packaging/__pycache__/_compat.cpython-37.pyc,, -packaging/__pycache__/_structures.cpython-37.pyc,, -packaging/__pycache__/markers.cpython-37.pyc,, -packaging/__pycache__/requirements.cpython-37.pyc,, -packaging/__pycache__/specifiers.cpython-37.pyc,, -packaging/__pycache__/tags.cpython-37.pyc,, -packaging/__pycache__/utils.cpython-37.pyc,, -packaging/__pycache__/version.cpython-37.pyc,, -packaging/_compat.py,sha256=Ugdm-qcneSchW25JrtMIKgUxfEEBcCAz6WrEeXeqz9o,865 -packaging/_structures.py,sha256=pVd90XcXRGwpZRB_qdFuVEibhCHpX_bL5zYr9-N0mc8,1416 -packaging/markers.py,sha256=g6HRCklg_3YQg6lHcMsctcwaI6KCw3AFsNGOmV2z6mI,8214 -packaging/requirements.py,sha256=1BnLnuPsAIpbYRi0jBemiD26Z7ukCzpIy-zP_0pv_4k,4652 -packaging/specifiers.py,sha256=0ZzQpcUnvrQ6LjR-mQRLzMr8G6hdRv-mY0VSf_amFtI,27778 -packaging/tags.py,sha256=EPLXhO6GTD7_oiWEO1U0l0PkfR8R_xivpMDHXnsTlts,12933 -packaging/utils.py,sha256=VaTC0Ei7zO2xl9ARiWmz2YFLFt89PuuhLbAlXMyAGms,1520 -packaging/version.py,sha256=Npdwnb8OHedj_2L86yiUqscujb7w_i5gmSK1PhOAFzg,11978 diff --git a/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/WHEEL b/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/WHEEL deleted file mode 100644 index 8b701e9..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.33.6) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/top_level.txt b/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/top_level.txt deleted file mode 100644 index 748809f..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/packaging-19.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -packaging diff --git a/.tox/py37-normal/lib/python3.7/site-packages/packaging/__about__.py b/.tox/py37-normal/lib/python3.7/site-packages/packaging/__about__.py deleted file mode 100644 index dc95138..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/packaging/__about__.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -__all__ = [ - "__title__", - "__summary__", - "__uri__", - "__version__", - "__author__", - "__email__", - "__license__", - "__copyright__", -] - -__title__ = "packaging" -__summary__ = "Core utilities for Python packages" -__uri__ = "https://github.com/pypa/packaging" - -__version__ = "19.2" - -__author__ = "Donald Stufft and individual contributors" -__email__ = "donald@stufft.io" - -__license__ = "BSD or Apache License, Version 2.0" -__copyright__ = "Copyright 2014-2019 %s" % __author__ diff --git a/.tox/py37-normal/lib/python3.7/site-packages/packaging/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/packaging/__init__.py deleted file mode 100644 index a0cf67d..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/packaging/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -from .__about__ import ( - __author__, - __copyright__, - __email__, - __license__, - __summary__, - __title__, - __uri__, - __version__, -) - -__all__ = [ - "__title__", - "__summary__", - "__uri__", - "__version__", - "__author__", - "__email__", - "__license__", - "__copyright__", -] diff --git a/.tox/py37-normal/lib/python3.7/site-packages/packaging/_compat.py b/.tox/py37-normal/lib/python3.7/site-packages/packaging/_compat.py deleted file mode 100644 index 25da473..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/packaging/_compat.py +++ /dev/null @@ -1,31 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import sys - - -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 - -# flake8: noqa - -if PY3: - string_types = (str,) -else: - string_types = (basestring,) - - -def with_metaclass(meta, *bases): - """ - Create a base class with a metaclass. - """ - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. - class metaclass(meta): - def __new__(cls, name, this_bases, d): - return meta(name, bases, d) - - return type.__new__(metaclass, "temporary_class", (), {}) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/packaging/_structures.py b/.tox/py37-normal/lib/python3.7/site-packages/packaging/_structures.py deleted file mode 100644 index 68dcca6..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/packaging/_structures.py +++ /dev/null @@ -1,68 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
-from __future__ import absolute_import, division, print_function - - -class Infinity(object): - def __repr__(self): - return "Infinity" - - def __hash__(self): - return hash(repr(self)) - - def __lt__(self, other): - return False - - def __le__(self, other): - return False - - def __eq__(self, other): - return isinstance(other, self.__class__) - - def __ne__(self, other): - return not isinstance(other, self.__class__) - - def __gt__(self, other): - return True - - def __ge__(self, other): - return True - - def __neg__(self): - return NegativeInfinity - - -Infinity = Infinity() - - -class NegativeInfinity(object): - def __repr__(self): - return "-Infinity" - - def __hash__(self): - return hash(repr(self)) - - def __lt__(self, other): - return True - - def __le__(self, other): - return True - - def __eq__(self, other): - return isinstance(other, self.__class__) - - def __ne__(self, other): - return not isinstance(other, self.__class__) - - def __gt__(self, other): - return False - - def __ge__(self, other): - return False - - def __neg__(self): - return Infinity - - -NegativeInfinity = NegativeInfinity() diff --git a/.tox/py37-normal/lib/python3.7/site-packages/packaging/markers.py b/.tox/py37-normal/lib/python3.7/site-packages/packaging/markers.py deleted file mode 100644 index 3b8af32..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/packaging/markers.py +++ /dev/null @@ -1,296 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import operator -import os -import platform -import sys - -from pyparsing import ParseException, ParseResults, stringStart, stringEnd -from pyparsing import ZeroOrMore, Group, Forward, QuotedString -from pyparsing import Literal as L # noqa - -from ._compat import string_types -from .specifiers import Specifier, InvalidSpecifier - - -__all__ = [ - "InvalidMarker", - "UndefinedComparison", - "UndefinedEnvironmentName", - "Marker", - "default_environment", -] - - -class InvalidMarker(ValueError): - """ - An invalid marker was found, users should refer to PEP 508. - """ - - -class UndefinedComparison(ValueError): - """ - An invalid operation was attempted on a value that doesn't support it. - """ - - -class UndefinedEnvironmentName(ValueError): - """ - A name was attempted to be used that does not exist inside of the - environment. 
- """ - - -class Node(object): - def __init__(self, value): - self.value = value - - def __str__(self): - return str(self.value) - - def __repr__(self): - return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) - - def serialize(self): - raise NotImplementedError - - -class Variable(Node): - def serialize(self): - return str(self) - - -class Value(Node): - def serialize(self): - return '"{0}"'.format(self) - - -class Op(Node): - def serialize(self): - return str(self) - - -VARIABLE = ( - L("implementation_version") - | L("platform_python_implementation") - | L("implementation_name") - | L("python_full_version") - | L("platform_release") - | L("platform_version") - | L("platform_machine") - | L("platform_system") - | L("python_version") - | L("sys_platform") - | L("os_name") - | L("os.name") - | L("sys.platform") # PEP-345 - | L("platform.version") # PEP-345 - | L("platform.machine") # PEP-345 - | L("platform.python_implementation") # PEP-345 - | L("python_implementation") # PEP-345 - | L("extra") # undocumented setuptools legacy -) -ALIASES = { - "os.name": "os_name", - "sys.platform": "sys_platform", - "platform.version": "platform_version", - "platform.machine": "platform_machine", - "platform.python_implementation": "platform_python_implementation", - "python_implementation": "platform_python_implementation", -} -VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) - -VERSION_CMP = ( - L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<") -) - -MARKER_OP = VERSION_CMP | L("not in") | L("in") -MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) - -MARKER_VALUE = QuotedString("'") | QuotedString('"') -MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) - -BOOLOP = L("and") | L("or") - -MARKER_VAR = VARIABLE | MARKER_VALUE - -MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) -MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) - -LPAREN = L("(").suppress() -RPAREN = L(")").suppress() - -MARKER_EXPR = Forward() -MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) -MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) - -MARKER = stringStart + MARKER_EXPR + stringEnd - - -def _coerce_parse_result(results): - if isinstance(results, ParseResults): - return [_coerce_parse_result(i) for i in results] - else: - return results - - -def _format_marker(marker, first=True): - assert isinstance(marker, (list, tuple, string_types)) - - # Sometimes we have a structure like [[...]] which is a single item list - # where the single item is itself it's own list. In that case we want skip - # the rest of this function so that we don't get extraneous () on the - # outside. 
- if ( - isinstance(marker, list) - and len(marker) == 1 - and isinstance(marker[0], (list, tuple)) - ): - return _format_marker(marker[0]) - - if isinstance(marker, list): - inner = (_format_marker(m, first=False) for m in marker) - if first: - return " ".join(inner) - else: - return "(" + " ".join(inner) + ")" - elif isinstance(marker, tuple): - return " ".join([m.serialize() for m in marker]) - else: - return marker - - -_operators = { - "in": lambda lhs, rhs: lhs in rhs, - "not in": lambda lhs, rhs: lhs not in rhs, - "<": operator.lt, - "<=": operator.le, - "==": operator.eq, - "!=": operator.ne, - ">=": operator.ge, - ">": operator.gt, -} - - -def _eval_op(lhs, op, rhs): - try: - spec = Specifier("".join([op.serialize(), rhs])) - except InvalidSpecifier: - pass - else: - return spec.contains(lhs) - - oper = _operators.get(op.serialize()) - if oper is None: - raise UndefinedComparison( - "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs) - ) - - return oper(lhs, rhs) - - -_undefined = object() - - -def _get_env(environment, name): - value = environment.get(name, _undefined) - - if value is _undefined: - raise UndefinedEnvironmentName( - "{0!r} does not exist in evaluation environment.".format(name) - ) - - return value - - -def _evaluate_markers(markers, environment): - groups = [[]] - - for marker in markers: - assert isinstance(marker, (list, tuple, string_types)) - - if isinstance(marker, list): - groups[-1].append(_evaluate_markers(marker, environment)) - elif isinstance(marker, tuple): - lhs, op, rhs = marker - - if isinstance(lhs, Variable): - lhs_value = _get_env(environment, lhs.value) - rhs_value = rhs.value - else: - lhs_value = lhs.value - rhs_value = _get_env(environment, rhs.value) - - groups[-1].append(_eval_op(lhs_value, op, rhs_value)) - else: - assert marker in ["and", "or"] - if marker == "or": - groups.append([]) - - return any(all(item) for item in groups) - - -def format_full_version(info): - version = "{0.major}.{0.minor}.{0.micro}".format(info) - kind = info.releaselevel - if kind != "final": - version += kind[0] + str(info.serial) - return version - - -def default_environment(): - if hasattr(sys, "implementation"): - iver = format_full_version(sys.implementation.version) - implementation_name = sys.implementation.name - else: - iver = "0" - implementation_name = "" - - return { - "implementation_name": implementation_name, - "implementation_version": iver, - "os_name": os.name, - "platform_machine": platform.machine(), - "platform_release": platform.release(), - "platform_system": platform.system(), - "platform_version": platform.version(), - "python_full_version": platform.python_version(), - "platform_python_implementation": platform.python_implementation(), - "python_version": ".".join(platform.python_version_tuple()[:2]), - "sys_platform": sys.platform, - } - - -class Marker(object): - def __init__(self, marker): - try: - self._markers = _coerce_parse_result(MARKER.parseString(marker)) - except ParseException as e: - err_str = "Invalid marker: {0!r}, parse error at {1!r}".format( - marker, marker[e.loc : e.loc + 8] - ) - raise InvalidMarker(err_str) - - def __str__(self): - return _format_marker(self._markers) - - def __repr__(self): - return "".format(str(self)) - - def evaluate(self, environment=None): - """Evaluate a marker. - - Return the boolean from evaluating the given marker against the - environment. environment is an optional argument to override all or - part of the determined environment. 
- - The environment is determined from the current Python process. - """ - current_environment = default_environment() - if environment is not None: - current_environment.update(environment) - - return _evaluate_markers(self._markers, current_environment) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/packaging/requirements.py b/.tox/py37-normal/lib/python3.7/site-packages/packaging/requirements.py deleted file mode 100644 index 4d9688b..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/packaging/requirements.py +++ /dev/null @@ -1,138 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import string -import re - -from pyparsing import stringStart, stringEnd, originalTextFor, ParseException -from pyparsing import ZeroOrMore, Word, Optional, Regex, Combine -from pyparsing import Literal as L # noqa -from six.moves.urllib import parse as urlparse - -from .markers import MARKER_EXPR, Marker -from .specifiers import LegacySpecifier, Specifier, SpecifierSet - - -class InvalidRequirement(ValueError): - """ - An invalid requirement was found, users should refer to PEP 508. - """ - - -ALPHANUM = Word(string.ascii_letters + string.digits) - -LBRACKET = L("[").suppress() -RBRACKET = L("]").suppress() -LPAREN = L("(").suppress() -RPAREN = L(")").suppress() -COMMA = L(",").suppress() -SEMICOLON = L(";").suppress() -AT = L("@").suppress() - -PUNCTUATION = Word("-_.") -IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) -IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) - -NAME = IDENTIFIER("name") -EXTRA = IDENTIFIER - -URI = Regex(r"[^ ]+")("url") -URL = AT + URI - -EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) -EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") - -VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) -VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) - -VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY -VERSION_MANY = Combine( - VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False -)("_raw_spec") -_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)) -_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "") - -VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") -VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) - -MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") -MARKER_EXPR.setParseAction( - lambda s, l, t: Marker(s[t._original_start : t._original_end]) -) -MARKER_SEPARATOR = SEMICOLON -MARKER = MARKER_SEPARATOR + MARKER_EXPR - -VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) -URL_AND_MARKER = URL + Optional(MARKER) - -NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) - -REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd -# pyparsing isn't thread safe during initialization, so we do it eagerly, see -# issue #104 -REQUIREMENT.parseString("x[]") - - -class Requirement(object): - """Parse a requirement. - - Parse a given requirement string into its parts, such as name, specifier, - URL, and extras. Raises InvalidRequirement on a badly-formed requirement - string. - """ - - # TODO: Can we test whether something is contained within a requirement? - # If so how do we do that? Do we need to test against the _name_ of - # the thing as well as the version? 
What about the markers? - # TODO: Can we normalize the name and extra name? - - def __init__(self, requirement_string): - try: - req = REQUIREMENT.parseString(requirement_string) - except ParseException as e: - raise InvalidRequirement( - 'Parse error at "{0!r}": {1}'.format( - requirement_string[e.loc : e.loc + 8], e.msg - ) - ) - - self.name = req.name - if req.url: - parsed_url = urlparse.urlparse(req.url) - if parsed_url.scheme == "file": - if urlparse.urlunparse(parsed_url) != req.url: - raise InvalidRequirement("Invalid URL given") - elif not (parsed_url.scheme and parsed_url.netloc) or ( - not parsed_url.scheme and not parsed_url.netloc - ): - raise InvalidRequirement("Invalid URL: {0}".format(req.url)) - self.url = req.url - else: - self.url = None - self.extras = set(req.extras.asList() if req.extras else []) - self.specifier = SpecifierSet(req.specifier) - self.marker = req.marker if req.marker else None - - def __str__(self): - parts = [self.name] - - if self.extras: - parts.append("[{0}]".format(",".join(sorted(self.extras)))) - - if self.specifier: - parts.append(str(self.specifier)) - - if self.url: - parts.append("@ {0}".format(self.url)) - if self.marker: - parts.append(" ") - - if self.marker: - parts.append("; {0}".format(self.marker)) - - return "".join(parts) - - def __repr__(self): - return "".format(str(self)) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/packaging/specifiers.py b/.tox/py37-normal/lib/python3.7/site-packages/packaging/specifiers.py deleted file mode 100644 index 743576a..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/packaging/specifiers.py +++ /dev/null @@ -1,749 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import abc -import functools -import itertools -import re - -from ._compat import string_types, with_metaclass -from .version import Version, LegacyVersion, parse - - -class InvalidSpecifier(ValueError): - """ - An invalid specifier was found, users should refer to PEP 440. - """ - - -class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): - @abc.abstractmethod - def __str__(self): - """ - Returns the str representation of this Specifier like object. This - should be representative of the Specifier itself. - """ - - @abc.abstractmethod - def __hash__(self): - """ - Returns a hash value for this Specifier like object. - """ - - @abc.abstractmethod - def __eq__(self, other): - """ - Returns a boolean representing whether or not the two Specifier like - objects are equal. - """ - - @abc.abstractmethod - def __ne__(self, other): - """ - Returns a boolean representing whether or not the two Specifier like - objects are not equal. - """ - - @abc.abstractproperty - def prereleases(self): - """ - Returns whether or not pre-releases as a whole are allowed by this - specifier. - """ - - @prereleases.setter - def prereleases(self, value): - """ - Sets whether or not pre-releases as a whole are allowed by this - specifier. - """ - - @abc.abstractmethod - def contains(self, item, prereleases=None): - """ - Determines if the given item is contained within this specifier. - """ - - @abc.abstractmethod - def filter(self, iterable, prereleases=None): - """ - Takes an iterable of items and filters them so that only items which - are contained within this specifier are allowed in it. 
- """ - - -class _IndividualSpecifier(BaseSpecifier): - - _operators = {} - - def __init__(self, spec="", prereleases=None): - match = self._regex.search(spec) - if not match: - raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) - - self._spec = (match.group("operator").strip(), match.group("version").strip()) - - # Store whether or not this Specifier should accept prereleases - self._prereleases = prereleases - - def __repr__(self): - pre = ( - ", prereleases={0!r}".format(self.prereleases) - if self._prereleases is not None - else "" - ) - - return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre) - - def __str__(self): - return "{0}{1}".format(*self._spec) - - def __hash__(self): - return hash(self._spec) - - def __eq__(self, other): - if isinstance(other, string_types): - try: - other = self.__class__(other) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._spec == other._spec - - def __ne__(self, other): - if isinstance(other, string_types): - try: - other = self.__class__(other) - except InvalidSpecifier: - return NotImplemented - elif not isinstance(other, self.__class__): - return NotImplemented - - return self._spec != other._spec - - def _get_operator(self, op): - return getattr(self, "_compare_{0}".format(self._operators[op])) - - def _coerce_version(self, version): - if not isinstance(version, (LegacyVersion, Version)): - version = parse(version) - return version - - @property - def operator(self): - return self._spec[0] - - @property - def version(self): - return self._spec[1] - - @property - def prereleases(self): - return self._prereleases - - @prereleases.setter - def prereleases(self, value): - self._prereleases = value - - def __contains__(self, item): - return self.contains(item) - - def contains(self, item, prereleases=None): - # Determine if prereleases are to be allowed or not. - if prereleases is None: - prereleases = self.prereleases - - # Normalize item to a Version or LegacyVersion, this allows us to have - # a shortcut for ``"2.0" in Specifier(">=2") - item = self._coerce_version(item) - - # Determine if we should be supporting prereleases in this specifier - # or not, if we do not support prereleases than we can short circuit - # logic if this version is a prereleases. - if item.is_prerelease and not prereleases: - return False - - # Actually do the comparison to determine if this item is contained - # within this Specifier or not. - return self._get_operator(self.operator)(item, self.version) - - def filter(self, iterable, prereleases=None): - yielded = False - found_prereleases = [] - - kw = {"prereleases": prereleases if prereleases is not None else True} - - # Attempt to iterate over all the values in the iterable and if any of - # them match, yield them. - for version in iterable: - parsed_version = self._coerce_version(version) - - if self.contains(parsed_version, **kw): - # If our version is a prerelease, and we were not set to allow - # prereleases, then we'll store it for later incase nothing - # else matches this specifier. - if parsed_version.is_prerelease and not ( - prereleases or self.prereleases - ): - found_prereleases.append(version) - # Either this is not a prerelease, or we should have been - # accepting prereleases from the beginning. 
- else: - yielded = True - yield version - - # Now that we've iterated over everything, determine if we've yielded - # any values, and if we have not and we have any prereleases stored up - # then we will go ahead and yield the prereleases. - if not yielded and found_prereleases: - for version in found_prereleases: - yield version - - -class LegacySpecifier(_IndividualSpecifier): - - _regex_str = r""" - (?P(==|!=|<=|>=|<|>)) - \s* - (?P - [^,;\s)]* # Since this is a "legacy" specifier, and the version - # string can be just about anything, we match everything - # except for whitespace, a semi-colon for marker support, - # a closing paren since versions can be enclosed in - # them, and a comma since it's a version separator. - ) - """ - - _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) - - _operators = { - "==": "equal", - "!=": "not_equal", - "<=": "less_than_equal", - ">=": "greater_than_equal", - "<": "less_than", - ">": "greater_than", - } - - def _coerce_version(self, version): - if not isinstance(version, LegacyVersion): - version = LegacyVersion(str(version)) - return version - - def _compare_equal(self, prospective, spec): - return prospective == self._coerce_version(spec) - - def _compare_not_equal(self, prospective, spec): - return prospective != self._coerce_version(spec) - - def _compare_less_than_equal(self, prospective, spec): - return prospective <= self._coerce_version(spec) - - def _compare_greater_than_equal(self, prospective, spec): - return prospective >= self._coerce_version(spec) - - def _compare_less_than(self, prospective, spec): - return prospective < self._coerce_version(spec) - - def _compare_greater_than(self, prospective, spec): - return prospective > self._coerce_version(spec) - - -def _require_version_compare(fn): - @functools.wraps(fn) - def wrapped(self, prospective, spec): - if not isinstance(prospective, Version): - return False - return fn(self, prospective, spec) - - return wrapped - - -class Specifier(_IndividualSpecifier): - - _regex_str = r""" - (?P(~=|==|!=|<=|>=|<|>|===)) - (?P - (?: - # The identity operators allow for an escape hatch that will - # do an exact string match of the version you wish to install. - # This will not be parsed by PEP 440 and we cannot determine - # any semantic meaning from it. This operator is discouraged - # but included entirely as an escape hatch. - (?<====) # Only match for the identity operator - \s* - [^\s]* # We just match everything, except for whitespace - # since we are only testing for strict identity. - ) - | - (?: - # The (non)equality operators allow for wild card and local - # versions to be specified so we have to define these two - # operators separately to enable that. - (?<===|!=) # Only match for equals and not equals - - \s* - v? - (?:[0-9]+!)? # epoch - [0-9]+(?:\.[0-9]+)* # release - (?: # pre release - [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - - # You cannot use a wild card and a dev or local version - # together so group them with a | and make them optional. - (?: - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local - | - \.\* # Wild card syntax of .* - )? - ) - | - (?: - # The compatible operator requires at least two digits in the - # release segment. - (?<=~=) # Only match for the compatible operator - - \s* - v? - (?:[0-9]+!)? 
# epoch - [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) - (?: # pre release - [-_\.]? - (a|b|c|rc|alpha|beta|pre|preview) - [-_\.]? - [0-9]* - )? - (?: # post release - (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) - )? - (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release - ) - | - (?: - # All other operators only allow a sub set of what the - # (non)equality operators do. Specifically they do not allow - # local versions to be specified nor do they allow the prefix - # matching wild cards. - (?=": "greater_than_equal", - "<": "less_than", - ">": "greater_than", - "===": "arbitrary", - } - - @_require_version_compare - def _compare_compatible(self, prospective, spec): - # Compatible releases have an equivalent combination of >= and ==. That - # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to - # implement this in terms of the other specifiers instead of - # implementing it ourselves. The only thing we need to do is construct - # the other specifiers. - - # We want everything but the last item in the version, but we want to - # ignore post and dev releases and we want to treat the pre-release as - # it's own separate segment. - prefix = ".".join( - list( - itertools.takewhile( - lambda x: (not x.startswith("post") and not x.startswith("dev")), - _version_split(spec), - ) - )[:-1] - ) - - # Add the prefix notation to the end of our string - prefix += ".*" - - return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( - prospective, prefix - ) - - @_require_version_compare - def _compare_equal(self, prospective, spec): - # We need special logic to handle prefix matching - if spec.endswith(".*"): - # In the case of prefix matching we want to ignore local segment. - prospective = Version(prospective.public) - # Split the spec out by dots, and pretend that there is an implicit - # dot in between a release segment and a pre-release segment. - spec = _version_split(spec[:-2]) # Remove the trailing .* - - # Split the prospective version out by dots, and pretend that there - # is an implicit dot in between a release segment and a pre-release - # segment. - prospective = _version_split(str(prospective)) - - # Shorten the prospective version to be the same length as the spec - # so that we can determine if the specifier is a prefix of the - # prospective version or not. - prospective = prospective[: len(spec)] - - # Pad out our two sides with zeros so that they both equal the same - # length. - spec, prospective = _pad_version(spec, prospective) - else: - # Convert our spec string into a Version - spec = Version(spec) - - # If the specifier does not have a local segment, then we want to - # act as if the prospective version also does not have a local - # segment. - if not spec.local: - prospective = Version(prospective.public) - - return prospective == spec - - @_require_version_compare - def _compare_not_equal(self, prospective, spec): - return not self._compare_equal(prospective, spec) - - @_require_version_compare - def _compare_less_than_equal(self, prospective, spec): - return prospective <= Version(spec) - - @_require_version_compare - def _compare_greater_than_equal(self, prospective, spec): - return prospective >= Version(spec) - - @_require_version_compare - def _compare_less_than(self, prospective, spec): - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec) - - # Check to see if the prospective version is less than the spec - # version. 
If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective < spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a pre-release version, that we do not accept pre-release - # versions for the version mentioned in the specifier (e.g. <3.1 should - # not match 3.1.dev0, but should match 3.0.dev0). - if not spec.is_prerelease and prospective.is_prerelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # less than the spec version *and* it's not a pre-release of the same - # version in the spec. - return True - - @_require_version_compare - def _compare_greater_than(self, prospective, spec): - # Convert our spec to a Version instance, since we'll want to work with - # it as a version. - spec = Version(spec) - - # Check to see if the prospective version is greater than the spec - # version. If it's not we can short circuit and just return False now - # instead of doing extra unneeded work. - if not prospective > spec: - return False - - # This special case is here so that, unless the specifier itself - # includes is a post-release version, that we do not accept - # post-release versions for the version mentioned in the specifier - # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). - if not spec.is_postrelease and prospective.is_postrelease: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # Ensure that we do not allow a local version of the version mentioned - # in the specifier, which is technically greater than, to match. - if prospective.local is not None: - if Version(prospective.base_version) == Version(spec.base_version): - return False - - # If we've gotten to here, it means that prospective version is both - # greater than the spec version *and* it's not a pre-release of the - # same version in the spec. - return True - - def _compare_arbitrary(self, prospective, spec): - return str(prospective).lower() == str(spec).lower() - - @property - def prereleases(self): - # If there is an explicit prereleases set for this, then we'll just - # blindly use that. - if self._prereleases is not None: - return self._prereleases - - # Look at all of our specifiers and determine if they are inclusive - # operators, and if they are if they are including an explicit - # prerelease. - operator, version = self._spec - if operator in ["==", ">=", "<=", "~=", "==="]: - # The == specifier can include a trailing .*, if it does we - # want to remove before parsing. - if operator == "==" and version.endswith(".*"): - version = version[:-2] - - # Parse the version, and if it is a pre-release than this - # specifier allows pre-releases. 
- if parse(version).is_prerelease: - return True - - return False - - @prereleases.setter - def prereleases(self, value): - self._prereleases = value - - -_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") - - -def _version_split(version): - result = [] - for item in version.split("."): - match = _prefix_regex.search(item) - if match: - result.extend(match.groups()) - else: - result.append(item) - return result - - -def _pad_version(left, right): - left_split, right_split = [], [] - - # Get the release segment of our versions - left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) - right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) - - # Get the rest of our versions - left_split.append(left[len(left_split[0]) :]) - right_split.append(right[len(right_split[0]) :]) - - # Insert our padding - left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) - right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) - - return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split))) - - -class SpecifierSet(BaseSpecifier): - def __init__(self, specifiers="", prereleases=None): - # Split on , to break each indidivual specifier into it's own item, and - # strip each item to remove leading/trailing whitespace. - specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] - - # Parsed each individual specifier, attempting first to make it a - # Specifier and falling back to a LegacySpecifier. - parsed = set() - for specifier in specifiers: - try: - parsed.add(Specifier(specifier)) - except InvalidSpecifier: - parsed.add(LegacySpecifier(specifier)) - - # Turn our parsed specifiers into a frozen set and save them for later. - self._specs = frozenset(parsed) - - # Store our prereleases value so we can use it later to determine if - # we accept prereleases or not. - self._prereleases = prereleases - - def __repr__(self): - pre = ( - ", prereleases={0!r}".format(self.prereleases) - if self._prereleases is not None - else "" - ) - - return "".format(str(self), pre) - - def __str__(self): - return ",".join(sorted(str(s) for s in self._specs)) - - def __hash__(self): - return hash(self._specs) - - def __and__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - specifier = SpecifierSet() - specifier._specs = frozenset(self._specs | other._specs) - - if self._prereleases is None and other._prereleases is not None: - specifier._prereleases = other._prereleases - elif self._prereleases is not None and other._prereleases is None: - specifier._prereleases = self._prereleases - elif self._prereleases == other._prereleases: - specifier._prereleases = self._prereleases - else: - raise ValueError( - "Cannot combine SpecifierSets with True and False prerelease " - "overrides." 
- ) - - return specifier - - def __eq__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif isinstance(other, _IndividualSpecifier): - other = SpecifierSet(str(other)) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - return self._specs == other._specs - - def __ne__(self, other): - if isinstance(other, string_types): - other = SpecifierSet(other) - elif isinstance(other, _IndividualSpecifier): - other = SpecifierSet(str(other)) - elif not isinstance(other, SpecifierSet): - return NotImplemented - - return self._specs != other._specs - - def __len__(self): - return len(self._specs) - - def __iter__(self): - return iter(self._specs) - - @property - def prereleases(self): - # If we have been given an explicit prerelease modifier, then we'll - # pass that through here. - if self._prereleases is not None: - return self._prereleases - - # If we don't have any specifiers, and we don't have a forced value, - # then we'll just return None since we don't know if this should have - # pre-releases or not. - if not self._specs: - return None - - # Otherwise we'll see if any of the given specifiers accept - # prereleases, if any of them do we'll return True, otherwise False. - return any(s.prereleases for s in self._specs) - - @prereleases.setter - def prereleases(self, value): - self._prereleases = value - - def __contains__(self, item): - return self.contains(item) - - def contains(self, item, prereleases=None): - # Ensure that our item is a Version or LegacyVersion instance. - if not isinstance(item, (LegacyVersion, Version)): - item = parse(item) - - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # We can determine if we're going to allow pre-releases by looking to - # see if any of the underlying items supports them. If none of them do - # and this item is a pre-release then we do not allow it and we can - # short circuit that here. - # Note: This means that 1.0.dev1 would not be contained in something - # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 - if not prereleases and item.is_prerelease: - return False - - # We simply dispatch to the underlying specs here to make sure that the - # given version is contained within all of them. - # Note: This use of all() here means that an empty set of specifiers - # will always return True, this is an explicit design decision. - return all(s.contains(item, prereleases=prereleases) for s in self._specs) - - def filter(self, iterable, prereleases=None): - # Determine if we're forcing a prerelease or not, if we're not forcing - # one for this particular filter call, then we'll use whatever the - # SpecifierSet thinks for whether or not we should support prereleases. - if prereleases is None: - prereleases = self.prereleases - - # If we have any specifiers, then we want to wrap our iterable in the - # filter method for each one, this will act as a logical AND amongst - # each specifier. - if self._specs: - for spec in self._specs: - iterable = spec.filter(iterable, prereleases=bool(prereleases)) - return iterable - # If we do not have any specifiers, then we need to have a rough filter - # which will filter out any pre-releases, unless there are no final - # releases, and which will filter out LegacyVersion in general. 
- else: - filtered = [] - found_prereleases = [] - - for item in iterable: - # Ensure that we some kind of Version class for this item. - if not isinstance(item, (LegacyVersion, Version)): - parsed_version = parse(item) - else: - parsed_version = item - - # Filter out any item which is parsed as a LegacyVersion - if isinstance(parsed_version, LegacyVersion): - continue - - # Store any item which is a pre-release for later unless we've - # already found a final version or we are accepting prereleases - if parsed_version.is_prerelease and not prereleases: - if not filtered: - found_prereleases.append(item) - else: - filtered.append(item) - - # If we've found no items except for pre-releases, then we'll go - # ahead and use the pre-releases - if not filtered and found_prereleases and prereleases is None: - return found_prereleases - - return filtered diff --git a/.tox/py37-normal/lib/python3.7/site-packages/packaging/tags.py b/.tox/py37-normal/lib/python3.7/site-packages/packaging/tags.py deleted file mode 100644 index ec9942f..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/packaging/tags.py +++ /dev/null @@ -1,404 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import absolute_import - -import distutils.util - -try: - from importlib.machinery import EXTENSION_SUFFIXES -except ImportError: # pragma: no cover - import imp - - EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()] - del imp -import platform -import re -import sys -import sysconfig -import warnings - - -INTERPRETER_SHORT_NAMES = { - "python": "py", # Generic. - "cpython": "cp", - "pypy": "pp", - "ironpython": "ip", - "jython": "jy", -} - - -_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32 - - -class Tag(object): - - __slots__ = ["_interpreter", "_abi", "_platform"] - - def __init__(self, interpreter, abi, platform): - self._interpreter = interpreter.lower() - self._abi = abi.lower() - self._platform = platform.lower() - - @property - def interpreter(self): - return self._interpreter - - @property - def abi(self): - return self._abi - - @property - def platform(self): - return self._platform - - def __eq__(self, other): - return ( - (self.platform == other.platform) - and (self.abi == other.abi) - and (self.interpreter == other.interpreter) - ) - - def __hash__(self): - return hash((self._interpreter, self._abi, self._platform)) - - def __str__(self): - return "{}-{}-{}".format(self._interpreter, self._abi, self._platform) - - def __repr__(self): - return "<{self} @ {self_id}>".format(self=self, self_id=id(self)) - - -def parse_tag(tag): - tags = set() - interpreters, abis, platforms = tag.split("-") - for interpreter in interpreters.split("."): - for abi in abis.split("."): - for platform_ in platforms.split("."): - tags.add(Tag(interpreter, abi, platform_)) - return frozenset(tags) - - -def _normalize_string(string): - return string.replace(".", "_").replace("-", "_") - - -def _cpython_interpreter(py_version): - # TODO: Is using py_version_nodot for interpreter version critical? 
- return "cp{major}{minor}".format(major=py_version[0], minor=py_version[1]) - - -def _cpython_abis(py_version): - abis = [] - version = "{}{}".format(*py_version[:2]) - debug = pymalloc = ucs4 = "" - with_debug = sysconfig.get_config_var("Py_DEBUG") - has_refcount = hasattr(sys, "gettotalrefcount") - # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled - # extension modules is the best option. - # https://github.com/pypa/pip/issues/3383#issuecomment-173267692 - has_ext = "_d.pyd" in EXTENSION_SUFFIXES - if with_debug or (with_debug is None and (has_refcount or has_ext)): - debug = "d" - if py_version < (3, 8): - with_pymalloc = sysconfig.get_config_var("WITH_PYMALLOC") - if with_pymalloc or with_pymalloc is None: - pymalloc = "m" - if py_version < (3, 3): - unicode_size = sysconfig.get_config_var("Py_UNICODE_SIZE") - if unicode_size == 4 or ( - unicode_size is None and sys.maxunicode == 0x10FFFF - ): - ucs4 = "u" - elif debug: - # Debug builds can also load "normal" extension modules. - # We can also assume no UCS-4 or pymalloc requirement. - abis.append("cp{version}".format(version=version)) - abis.insert( - 0, - "cp{version}{debug}{pymalloc}{ucs4}".format( - version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4 - ), - ) - return abis - - -def _cpython_tags(py_version, interpreter, abis, platforms): - for abi in abis: - for platform_ in platforms: - yield Tag(interpreter, abi, platform_) - for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms): - yield tag - for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms): - yield tag - # PEP 384 was first implemented in Python 3.2. - for minor_version in range(py_version[1] - 1, 1, -1): - for platform_ in platforms: - interpreter = "cp{major}{minor}".format( - major=py_version[0], minor=minor_version - ) - yield Tag(interpreter, "abi3", platform_) - - -def _pypy_interpreter(): - return "pp{py_major}{pypy_major}{pypy_minor}".format( - py_major=sys.version_info[0], - pypy_major=sys.pypy_version_info.major, - pypy_minor=sys.pypy_version_info.minor, - ) - - -def _generic_abi(): - abi = sysconfig.get_config_var("SOABI") - if abi: - return _normalize_string(abi) - else: - return "none" - - -def _pypy_tags(py_version, interpreter, abi, platforms): - for tag in (Tag(interpreter, abi, platform) for platform in platforms): - yield tag - for tag in (Tag(interpreter, "none", platform) for platform in platforms): - yield tag - - -def _generic_tags(interpreter, py_version, abi, platforms): - for tag in (Tag(interpreter, abi, platform) for platform in platforms): - yield tag - if abi != "none": - tags = (Tag(interpreter, "none", platform_) for platform_ in platforms) - for tag in tags: - yield tag - - -def _py_interpreter_range(py_version): - """ - Yield Python versions in descending order. - - After the latest version, the major-only version will be yielded, and then - all following versions up to 'end'. - """ - yield "py{major}{minor}".format(major=py_version[0], minor=py_version[1]) - yield "py{major}".format(major=py_version[0]) - for minor in range(py_version[1] - 1, -1, -1): - yield "py{major}{minor}".format(major=py_version[0], minor=minor) - - -def _independent_tags(interpreter, py_version, platforms): - """ - Return the sequence of tags that are consistent across implementations. 
- - The tags consist of: - - py*-none- - - -none-any - - py*-none-any - """ - for version in _py_interpreter_range(py_version): - for platform_ in platforms: - yield Tag(version, "none", platform_) - yield Tag(interpreter, "none", "any") - for version in _py_interpreter_range(py_version): - yield Tag(version, "none", "any") - - -def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER): - if not is_32bit: - return arch - - if arch.startswith("ppc"): - return "ppc" - - return "i386" - - -def _mac_binary_formats(version, cpu_arch): - formats = [cpu_arch] - if cpu_arch == "x86_64": - if version < (10, 4): - return [] - formats.extend(["intel", "fat64", "fat32"]) - - elif cpu_arch == "i386": - if version < (10, 4): - return [] - formats.extend(["intel", "fat32", "fat"]) - - elif cpu_arch == "ppc64": - # TODO: Need to care about 32-bit PPC for ppc64 through 10.2? - if version > (10, 5) or version < (10, 4): - return [] - formats.append("fat64") - - elif cpu_arch == "ppc": - if version > (10, 6): - return [] - formats.extend(["fat32", "fat"]) - - formats.append("universal") - return formats - - -def _mac_platforms(version=None, arch=None): - version_str, _, cpu_arch = platform.mac_ver() - if version is None: - version = tuple(map(int, version_str.split(".")[:2])) - if arch is None: - arch = _mac_arch(cpu_arch) - platforms = [] - for minor_version in range(version[1], -1, -1): - compat_version = version[0], minor_version - binary_formats = _mac_binary_formats(compat_version, arch) - for binary_format in binary_formats: - platforms.append( - "macosx_{major}_{minor}_{binary_format}".format( - major=compat_version[0], - minor=compat_version[1], - binary_format=binary_format, - ) - ) - return platforms - - -# From PEP 513. -def _is_manylinux_compatible(name, glibc_version): - # Check for presence of _manylinux module. - try: - import _manylinux - - return bool(getattr(_manylinux, name + "_compatible")) - except (ImportError, AttributeError): - # Fall through to heuristic check below. - pass - - return _have_compatible_glibc(*glibc_version) - - -def _glibc_version_string(): - # Returns glibc version string, or None if not using glibc. - import ctypes - - # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen - # manpage says, "If filename is NULL, then the returned handle is for the - # main program". This way we can let the linker do the work to figure out - # which libc our process is actually using. - process_namespace = ctypes.CDLL(None) - try: - gnu_get_libc_version = process_namespace.gnu_get_libc_version - except AttributeError: - # Symbol doesn't exist -> therefore, we are not linked to - # glibc. - return None - - # Call gnu_get_libc_version, which returns a string like "2.5" - gnu_get_libc_version.restype = ctypes.c_char_p - version_str = gnu_get_libc_version() - # py2 / py3 compatibility: - if not isinstance(version_str, str): - version_str = version_str.decode("ascii") - - return version_str - - -# Separated out from have_compatible_glibc for easier unit testing. -def _check_glibc_version(version_str, required_major, minimum_minor): - # Parse string and check against requested version. - # - # We use a regexp instead of str.split because we want to discard any - # random junk that might come after the minor version -- this might happen - # in patched/forked versions of glibc (e.g. Linaro's version of glibc - # uses version strings like "2.20-2014.11"). See gh-3588. 
- m = re.match(r"(?P[0-9]+)\.(?P[0-9]+)", version_str) - if not m: - warnings.warn( - "Expected glibc version with 2 components major.minor," - " got: %s" % version_str, - RuntimeWarning, - ) - return False - return ( - int(m.group("major")) == required_major - and int(m.group("minor")) >= minimum_minor - ) - - -def _have_compatible_glibc(required_major, minimum_minor): - version_str = _glibc_version_string() - if version_str is None: - return False - return _check_glibc_version(version_str, required_major, minimum_minor) - - -def _linux_platforms(is_32bit=_32_BIT_INTERPRETER): - linux = _normalize_string(distutils.util.get_platform()) - if linux == "linux_x86_64" and is_32bit: - linux = "linux_i686" - manylinux_support = ( - ("manylinux2014", (2, 17)), # CentOS 7 w/ glibc 2.17 (PEP 599) - ("manylinux2010", (2, 12)), # CentOS 6 w/ glibc 2.12 (PEP 571) - ("manylinux1", (2, 5)), # CentOS 5 w/ glibc 2.5 (PEP 513) - ) - manylinux_support_iter = iter(manylinux_support) - for name, glibc_version in manylinux_support_iter: - if _is_manylinux_compatible(name, glibc_version): - platforms = [linux.replace("linux", name)] - break - else: - platforms = [] - # Support for a later manylinux implies support for an earlier version. - platforms += [linux.replace("linux", name) for name, _ in manylinux_support_iter] - platforms.append(linux) - return platforms - - -def _generic_platforms(): - platform = _normalize_string(distutils.util.get_platform()) - return [platform] - - -def _interpreter_name(): - name = platform.python_implementation().lower() - return INTERPRETER_SHORT_NAMES.get(name) or name - - -def _generic_interpreter(name, py_version): - version = sysconfig.get_config_var("py_version_nodot") - if not version: - version = "".join(map(str, py_version[:2])) - return "{name}{version}".format(name=name, version=version) - - -def sys_tags(): - """ - Returns the sequence of tag triples for the running interpreter. - - The order of the sequence corresponds to priority order for the - interpreter, from most to least important. - """ - py_version = sys.version_info[:2] - interpreter_name = _interpreter_name() - if platform.system() == "Darwin": - platforms = _mac_platforms() - elif platform.system() == "Linux": - platforms = _linux_platforms() - else: - platforms = _generic_platforms() - - if interpreter_name == "cp": - interpreter = _cpython_interpreter(py_version) - abis = _cpython_abis(py_version) - for tag in _cpython_tags(py_version, interpreter, abis, platforms): - yield tag - elif interpreter_name == "pp": - interpreter = _pypy_interpreter() - abi = _generic_abi() - for tag in _pypy_tags(py_version, interpreter, abi, platforms): - yield tag - else: - interpreter = _generic_interpreter(interpreter_name, py_version) - abi = _generic_abi() - for tag in _generic_tags(interpreter, py_version, abi, platforms): - yield tag - for tag in _independent_tags(interpreter, py_version, platforms): - yield tag diff --git a/.tox/py37-normal/lib/python3.7/site-packages/packaging/utils.py b/.tox/py37-normal/lib/python3.7/site-packages/packaging/utils.py deleted file mode 100644 index 8841878..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/packaging/utils.py +++ /dev/null @@ -1,57 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
-from __future__ import absolute_import, division, print_function - -import re - -from .version import InvalidVersion, Version - - -_canonicalize_regex = re.compile(r"[-_.]+") - - -def canonicalize_name(name): - # This is taken from PEP 503. - return _canonicalize_regex.sub("-", name).lower() - - -def canonicalize_version(version): - """ - This is very similar to Version.__str__, but has one subtle differences - with the way it handles the release segment. - """ - - try: - version = Version(version) - except InvalidVersion: - # Legacy versions cannot be normalized - return version - - parts = [] - - # Epoch - if version.epoch != 0: - parts.append("{0}!".format(version.epoch)) - - # Release segment - # NB: This strips trailing '.0's to normalize - parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release))) - - # Pre-release - if version.pre is not None: - parts.append("".join(str(x) for x in version.pre)) - - # Post-release - if version.post is not None: - parts.append(".post{0}".format(version.post)) - - # Development release - if version.dev is not None: - parts.append(".dev{0}".format(version.dev)) - - # Local version segment - if version.local is not None: - parts.append("+{0}".format(version.local)) - - return "".join(parts) diff --git a/.tox/py37-normal/lib/python3.7/site-packages/packaging/version.py b/.tox/py37-normal/lib/python3.7/site-packages/packaging/version.py deleted file mode 100644 index 95157a1..0000000 --- a/.tox/py37-normal/lib/python3.7/site-packages/packaging/version.py +++ /dev/null @@ -1,420 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. -from __future__ import absolute_import, division, print_function - -import collections -import itertools -import re - -from ._structures import Infinity - - -__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"] - - -_Version = collections.namedtuple( - "_Version", ["epoch", "release", "dev", "pre", "post", "local"] -) - - -def parse(version): - """ - Parse the given version string and return either a :class:`Version` object - or a :class:`LegacyVersion` object depending on if the given version is - a valid PEP 440 version or a legacy version. - """ - try: - return Version(version) - except InvalidVersion: - return LegacyVersion(version) - - -class InvalidVersion(ValueError): - """ - An invalid version was found, users should refer to PEP 440. 
- """ - - -class _BaseVersion(object): - def __hash__(self): - return hash(self._key) - - def __lt__(self, other): - return self._compare(other, lambda s, o: s < o) - - def __le__(self, other): - return self._compare(other, lambda s, o: s <= o) - - def __eq__(self, other): - return self._compare(other, lambda s, o: s == o) - - def __ge__(self, other): - return self._compare(other, lambda s, o: s >= o) - - def __gt__(self, other): - return self._compare(other, lambda s, o: s > o) - - def __ne__(self, other): - return self._compare(other, lambda s, o: s != o) - - def _compare(self, other, method): - if not isinstance(other, _BaseVersion): - return NotImplemented - - return method(self._key, other._key) - - -class LegacyVersion(_BaseVersion): - def __init__(self, version): - self._version = str(version) - self._key = _legacy_cmpkey(self._version) - - def __str__(self): - return self._version - - def __repr__(self): - return "".format(repr(str(self))) - - @property - def public(self): - return self._version - - @property - def base_version(self): - return self._version - - @property - def epoch(self): - return -1 - - @property - def release(self): - return None - - @property - def pre(self): - return None - - @property - def post(self): - return None - - @property - def dev(self): - return None - - @property - def local(self): - return None - - @property - def is_prerelease(self): - return False - - @property - def is_postrelease(self): - return False - - @property - def is_devrelease(self): - return False - - -_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE) - -_legacy_version_replacement_map = { - "pre": "c", - "preview": "c", - "-": "final-", - "rc": "c", - "dev": "@", -} - - -def _parse_version_parts(s): - for part in _legacy_version_component_re.split(s): - part = _legacy_version_replacement_map.get(part, part) - - if not part or part == ".": - continue - - if part[:1] in "0123456789": - # pad for numeric comparison - yield part.zfill(8) - else: - yield "*" + part - - # ensure that alpha/beta/candidate are before final - yield "*final" - - -def _legacy_cmpkey(version): - # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch - # greater than or equal to 0. This will effectively put the LegacyVersion, - # which uses the defacto standard originally implemented by setuptools, - # as before all PEP 440 versions. - epoch = -1 - - # This scheme is taken from pkg_resources.parse_version setuptools prior to - # it's adoption of the packaging library. - parts = [] - for part in _parse_version_parts(version.lower()): - if part.startswith("*"): - # remove "-" before a prerelease tag - if part < "*final": - while parts and parts[-1] == "*final-": - parts.pop() - - # remove trailing zeros from each series of numeric parts - while parts and parts[-1] == "00000000": - parts.pop() - - parts.append(part) - parts = tuple(parts) - - return epoch, parts - - -# Deliberately not anchored to the start and end of the string, to make it -# easier for 3rd party code to reuse -VERSION_PATTERN = r""" - v? - (?: - (?:(?P[0-9]+)!)? # epoch - (?P[0-9]+(?:\.[0-9]+)*) # release segment - (?P
<pre>                                     # pre-release
-            [-_\.]?
-            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
-            [-_\.]?
-            (?P<pre_n>[0-9]+)?
-        )?
-        (?P<post>                                         # post release
-            (?:-(?P<post_n1>[0-9]+))
-            |
-            (?:
-                [-_\.]?
-                (?P<post_l>post|rev|r)
-                [-_\.]?
-                (?P<post_n2>[0-9]+)?
-            )
-        )?
-        (?P<dev>                                          # dev release
-            [-_\.]?
-            (?P<dev_l>dev)
-            [-_\.]?
-            (?P<dev_n>[0-9]+)?
-        )?
-    )
-    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
-"""
-
-
-class Version(_BaseVersion):
-
-    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
-
-    def __init__(self, version):
-        # Validate the version and parse it into pieces
-        match = self._regex.search(version)
-        if not match:
-            raise InvalidVersion("Invalid version: '{0}'".format(version))
-
-        # Store the parsed out pieces of the version
-        self._version = _Version(
-            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
-            release=tuple(int(i) for i in match.group("release").split(".")),
-            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
-            post=_parse_letter_version(
-                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
-            ),
-            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
-            local=_parse_local_version(match.group("local")),
-        )
-
-        # Generate a key which will be used for sorting
-        self._key = _cmpkey(
-            self._version.epoch,
-            self._version.release,
-            self._version.pre,
-            self._version.post,
-            self._version.dev,
-            self._version.local,
-        )
-
-    def __repr__(self):
-        return "<Version({0})>".format(repr(str(self)))
-
-    def __str__(self):
-        parts = []
-
-        # Epoch
-        if self.epoch != 0:
-            parts.append("{0}!".format(self.epoch))
-
-        # Release segment
-        parts.append(".".join(str(x) for x in self.release))
-
-        # Pre-release
-        if self.pre is not None:
-            parts.append("".join(str(x) for x in self.pre))
-
-        # Post-release
-        if self.post is not None:
-            parts.append(".post{0}".format(self.post))
-
-        # Development release
-        if self.dev is not None:
-            parts.append(".dev{0}".format(self.dev))
-
-        # Local version segment
-        if self.local is not None:
-            parts.append("+{0}".format(self.local))
-
-        return "".join(parts)
-
-    @property
-    def epoch(self):
-        return self._version.epoch
-
-    @property
-    def release(self):
-        return self._version.release
-
-    @property
-    def pre(self):
-        return self._version.pre
-
-    @property
-    def post(self):
-        return self._version.post[1] if self._version.post else None
-
-    @property
-    def dev(self):
-        return self._version.dev[1] if self._version.dev else None
-
-    @property
-    def local(self):
-        if self._version.local:
-            return ".".join(str(x) for x in self._version.local)
-        else:
-            return None
-
-    @property
-    def public(self):
-        return str(self).split("+", 1)[0]
-
-    @property
-    def base_version(self):
-        parts = []
-
-        # Epoch
-        if self.epoch != 0:
-            parts.append("{0}!".format(self.epoch))
-
-        # Release segment
-        parts.append(".".join(str(x) for x in self.release))
-
-        return "".join(parts)
-
-    @property
-    def is_prerelease(self):
-        return self.dev is not None or self.pre is not None
-
-    @property
-    def is_postrelease(self):
-        return self.post is not None
-
-    @property
-    def is_devrelease(self):
-        return self.dev is not None
-
-
-def _parse_letter_version(letter, number):
-    if letter:
-        # We consider there to be an implicit 0 in a pre-release if there is
-        # not a numeral associated with it.
-        if number is None:
-            number = 0
-
-        # We normalize any letters to their lower case form
-        letter = letter.lower()
-
-        # We consider some words to be alternate spellings of other words and
-        # in those cases we want to normalize the spellings to our preferred
-        # spelling.
-        if letter == "alpha":
-            letter = "a"
-        elif letter == "beta":
-            letter = "b"
-        elif letter in ["c", "pre", "preview"]:
-            letter = "rc"
-        elif letter in ["rev", "r"]:
-            letter = "post"
-
-        return letter, int(number)
-    if not letter and number:
-        # We assume if we are given a number, but we are not given a letter
-        # then this is using the implicit post release syntax (e.g. 1.0-1)
-        letter = "post"
-
-        return letter, int(number)
-
-
-_local_version_separators = re.compile(r"[\._-]")
-
-
-def _parse_local_version(local):
-    """
-    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
-    """
-    if local is not None:
-        return tuple(
-            part.lower() if not part.isdigit() else int(part)
-            for part in _local_version_separators.split(local)
-        )
-
-
-def _cmpkey(epoch, release, pre, post, dev, local):
-    # When we compare a release version, we want to compare it with all of the
-    # trailing zeros removed. So we'll reverse the list, drop all the now-leading
-    # zeros until we come to something non-zero, re-reverse the rest back into
-    # the correct order, make it a tuple, and use that for our sorting key.
-    release = tuple(
-        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
-    )
-
-    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
-    # We'll do this by abusing the pre segment, but we _only_ want to do this
-    # if there is not a pre or a post segment. If we have one of those then
-    # the normal sorting rules will handle this case correctly.
-    if pre is None and post is None and dev is not None:
-        pre = -Infinity
-    # Versions without a pre-release (except as noted above) should sort after
-    # those with one.
-    elif pre is None:
-        pre = Infinity
-
-    # Versions without a post segment should sort before those with one.
-    if post is None:
-        post = -Infinity
-
-    # Versions without a development segment should sort after those with one.
-    if dev is None:
-        dev = Infinity
-
-    if local is None:
-        # Versions without a local segment should sort before those with one.
-        local = -Infinity
-    else:
-        # Versions with a local segment need that segment parsed to implement
-        # the sorting rules in PEP440.
-        # - Alpha numeric segments sort before numeric segments
-        # - Alpha numeric segments sort lexicographically
-        # - Numeric segments sort numerically
-        # - Shorter versions sort before longer versions when the prefixes
-        #   match exactly
-        local = tuple((i, "") if isinstance(i, int) else (-Infinity, i) for i in local)
-
-    return epoch, release, pre, post, dev, local
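
The deleted version.py above implements PEP 440 ordering: _cmpkey trims trailing zeros from the release tuple, forces development releases below pre-releases, and expands local segments so numeric parts sort numerically. A minimal sketch of the resulting order, using the public packaging.version API rather than this vendored copy (assumes the packaging distribution is importable):

    # Sketch only -- not part of the deleted file. Ordering produced by the
    # _cmpkey logic above, exercised through the public packaging API.
    from packaging.version import Version

    versions = ["1.0.post1", "1.0+local.1", "1.0", "1.0rc1", "1.0a1", "1.0.dev0"]
    print(sorted(versions, key=Version))
    # ['1.0.dev0', '1.0a1', '1.0rc1', '1.0', '1.0+local.1', '1.0.post1']

    # _parse_letter_version normalizes alternate spellings, so these compare equal:
    print(Version("1.0alpha1") == Version("1.0a1"))    # True
    print(Version("1.0rev2") == Version("1.0.post2"))  # True
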
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/INSTALLER b/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/INSTALLER
deleted file mode 100644
index a1b589e..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/LICENSE.txt b/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/LICENSE.txt
deleted file mode 100644
index 737fec5..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/LICENSE.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-Copyright (c) 2008-2019 The pip developers (see AUTHORS.txt file)
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/METADATA b/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/METADATA
deleted file mode 100644
index 52ebaf5..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/METADATA
+++ /dev/null
@@ -1,81 +0,0 @@
-Metadata-Version: 2.1
-Name: pip
-Version: 19.3
-Summary: The PyPA recommended tool for installing Python packages.
-Home-page: https://pip.pypa.io/
-Author: The pip developers
-Author-email: pypa-dev@groups.google.com
-License: MIT
-Keywords: distutils easy_install egg setuptools wheel virtualenv
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Topic :: Software Development :: Build Tools
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 2
-Classifier: Programming Language :: Python :: 2.7
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.5
-Classifier: Programming Language :: Python :: 3.6
-Classifier: Programming Language :: Python :: 3.7
-Classifier: Programming Language :: Python :: Implementation :: CPython
-Classifier: Programming Language :: Python :: Implementation :: PyPy
-Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*
-
-pip - The Python Package Installer
-==================================
-
-.. image:: https://img.shields.io/pypi/v/pip.svg
-   :target: https://pypi.org/project/pip/
-
-.. image:: https://readthedocs.org/projects/pip/badge/?version=latest
-   :target: https://pip.pypa.io/en/latest
-
-pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.
-
-Please take a look at our documentation for how to install and use pip:
-
-* `Installation`_
-* `Usage`_
-
-Updates are released regularly, with a new version every 3 months. More details can be found in our documentation:
-
-* `Release notes`_
-* `Release process`_
-
-If you find bugs, need help, or want to talk to the developers please use our mailing lists or chat rooms:
-
-* `Issue tracking`_
-* `Discourse channel`_
-* `User IRC`_
-
-If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms:
-
-* `GitHub page`_
-* `Dev documentation`_
-* `Dev mailing list`_
-* `Dev IRC`_
-
-Code of Conduct
----------------
-
-Everyone interacting in the pip project's codebases, issue trackers, chat
-rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_.
-
-.. _package installer: https://packaging.python.org/en/latest/current/
-.. _Python Package Index: https://pypi.org
-.. _Installation: https://pip.pypa.io/en/stable/installing.html
-.. _Usage: https://pip.pypa.io/en/stable/
-.. _Release notes: https://pip.pypa.io/en/stable/news.html
-.. _Release process: https://pip.pypa.io/en/latest/development/release-process/
-.. _GitHub page: https://github.com/pypa/pip
-.. _Dev documentation: https://pip.pypa.io/en/latest/development
-.. _Issue tracking: https://github.com/pypa/pip/issues
-.. _Discourse channel: https://discuss.python.org/c/packaging
-.. _Dev mailing list: https://groups.google.com/forum/#!forum/pypa-dev
-.. _User IRC: https://webchat.freenode.net/?channels=%23pypa
-.. _Dev IRC: https://webchat.freenode.net/?channels=%23pypa-dev
-.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/
-
-
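
The Requires-Python line in the metadata above is a PEP 440 specifier set. A brief sketch, assuming the packaging distribution is available, of how such a constraint is evaluated:

    # Sketch only -- evaluating the Requires-Python constraint from the METADATA above.
    from packaging.specifiers import SpecifierSet

    requires_python = SpecifierSet(">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*")
    print("3.7.4" in requires_python)  # True
    print("3.3.0" in requires_python)  # False (excluded by !=3.3.*)
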
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/RECORD b/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/RECORD
deleted file mode 100644
index 5c524c8..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/RECORD
+++ /dev/null
@@ -1,670 +0,0 @@
-../../../bin/pip,sha256=RNEV_-gnqpIh0Fl1juH_x7Fqf0qY8ozVlLpv83ODxLk,260
-../../../bin/pip3,sha256=RNEV_-gnqpIh0Fl1juH_x7Fqf0qY8ozVlLpv83ODxLk,260
-../../../bin/pip3.7,sha256=RNEV_-gnqpIh0Fl1juH_x7Fqf0qY8ozVlLpv83ODxLk,260
-pip-19.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-pip-19.3.dist-info/LICENSE.txt,sha256=W6Ifuwlk-TatfRU2LR7W1JMcyMj5_y1NkRkOEJvnRDE,1090
-pip-19.3.dist-info/METADATA,sha256=o2Fy95hGNb3jvUeUR8z7anTqS867qjZTyIhf-ZS5RVg,3193
-pip-19.3.dist-info/RECORD,,
-pip-19.3.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110
-pip-19.3.dist-info/entry_points.txt,sha256=UgRCnURjFVRYG8T_Od_EGhoGPr0vPR8xGqp3_CdY2RY,113
-pip-19.3.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-pip/__init__.py,sha256=sL2z8RzC86y_CASwQsBC5Y4j-ElVDBqFW6LAssg1QnY,21
-pip/__main__.py,sha256=JhJZiiWUmlPTwFOZ7_L68hWG4nUVQTDTtwc1o_yWdWw,628
-pip/__pycache__/__init__.cpython-37.pyc,,
-pip/__pycache__/__main__.cpython-37.pyc,,
-pip/_internal/__init__.py,sha256=4lqJ4waZxiP4QFg34znQ3j5zdyDsrWZkERPeLAs7lT8,80
-pip/_internal/__pycache__/__init__.cpython-37.pyc,,
-pip/_internal/__pycache__/build_env.cpython-37.pyc,,
-pip/_internal/__pycache__/cache.cpython-37.pyc,,
-pip/_internal/__pycache__/collector.cpython-37.pyc,,
-pip/_internal/__pycache__/configuration.cpython-37.pyc,,
-pip/_internal/__pycache__/download.cpython-37.pyc,,
-pip/_internal/__pycache__/exceptions.cpython-37.pyc,,
-pip/_internal/__pycache__/index.cpython-37.pyc,,
-pip/_internal/__pycache__/legacy_resolve.cpython-37.pyc,,
-pip/_internal/__pycache__/locations.cpython-37.pyc,,
-pip/_internal/__pycache__/main.cpython-37.pyc,,
-pip/_internal/__pycache__/pep425tags.cpython-37.pyc,,
-pip/_internal/__pycache__/pyproject.cpython-37.pyc,,
-pip/_internal/__pycache__/self_outdated_check.cpython-37.pyc,,
-pip/_internal/__pycache__/wheel.cpython-37.pyc,,
-pip/_internal/build_env.py,sha256=h8_IrSllK1aEv4cmoO9G7l5XDrwMeq2aADy6EWqbEKs,7517
-pip/_internal/cache.py,sha256=tv5gyBOh4gx8my0ikxCEHzg92Y5D8mCbcE4lq9rQLhw,8371
-pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132
-pip/_internal/cli/__pycache__/__init__.cpython-37.pyc,,
-pip/_internal/cli/__pycache__/autocompletion.cpython-37.pyc,,
-pip/_internal/cli/__pycache__/base_command.cpython-37.pyc,,
-pip/_internal/cli/__pycache__/cmdoptions.cpython-37.pyc,,
-pip/_internal/cli/__pycache__/command_context.cpython-37.pyc,,
-pip/_internal/cli/__pycache__/main_parser.cpython-37.pyc,,
-pip/_internal/cli/__pycache__/parser.cpython-37.pyc,,
-pip/_internal/cli/__pycache__/req_command.cpython-37.pyc,,
-pip/_internal/cli/__pycache__/status_codes.cpython-37.pyc,,
-pip/_internal/cli/autocompletion.py,sha256=KLBLfPkMKgkl6FS6RGlYlgRmdxFiwvdefv3Ywt5JqzM,6169
-pip/_internal/cli/base_command.py,sha256=Xg0kP33AsjMYqXmFoZ-HX6-hRM5QaKDXAJRXLO03IVU,6504
-pip/_internal/cli/cmdoptions.py,sha256=y6x_kbx-obAF2i6rO0cZR2HOi5SC31LKxl0ZjQV6uOs,26839
-pip/_internal/cli/command_context.py,sha256=F0hZ0Xm8NZAgcOpl5J46OpjvAhIgx5P1nZHGaAAxlmc,796
-pip/_internal/cli/main_parser.py,sha256=W9OWeryh7ZkqELohaFh0Ko9sB98ZkSeDmnYbOZ1imBc,2819
-pip/_internal/cli/parser.py,sha256=O9djTuYQuSfObiY-NU6p4MJCfWsRUnDpE2YGA_fwols,9487
-pip/_internal/cli/req_command.py,sha256=J1fTDjVZAExZDMBEqwJQ-NgNlab1g8zE1nZND-6TnP0,11363
-pip/_internal/cli/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156
-pip/_internal/collector.py,sha256=zGSX67z_CN8uGIxxseKTIV2TC1QN1AE9mwBPvodTxak,18007
-pip/_internal/commands/__init__.py,sha256=uTSj58QlrSKeXqCUSdL-eAf_APzx5BHy1ABxb0j5ZNE,3714
-pip/_internal/commands/__pycache__/__init__.cpython-37.pyc,,
-pip/_internal/commands/__pycache__/check.cpython-37.pyc,,
-pip/_internal/commands/__pycache__/completion.cpython-37.pyc,,
-pip/_internal/commands/__pycache__/configuration.cpython-37.pyc,,
-pip/_internal/commands/__pycache__/debug.cpython-37.pyc,,
-pip/_internal/commands/__pycache__/download.cpython-37.pyc,,
-pip/_internal/commands/__pycache__/freeze.cpython-37.pyc,,
-pip/_internal/commands/__pycache__/hash.cpython-37.pyc,,
-pip/_internal/commands/__pycache__/help.cpython-37.pyc,,
-pip/_internal/commands/__pycache__/install.cpython-37.pyc,,
-pip/_internal/commands/__pycache__/list.cpython-37.pyc,,
-pip/_internal/commands/__pycache__/search.cpython-37.pyc,,
-pip/_internal/commands/__pycache__/show.cpython-37.pyc,,
-pip/_internal/commands/__pycache__/uninstall.cpython-37.pyc,,
-pip/_internal/commands/__pycache__/wheel.cpython-37.pyc,,
-pip/_internal/commands/check.py,sha256=mgLNYT3bd6Kmynwh4zzcBmVlFZ-urMo40jTgk6U405E,1505
-pip/_internal/commands/completion.py,sha256=UFQvq0Q4_B96z1bvnQyMOq82aPSu05RejbLmqeTZjC0,2975
-pip/_internal/commands/configuration.py,sha256=6riioZjMhsNSEct7dE-X8SobGodk3WERKJvuyjBje4Q,7226
-pip/_internal/commands/debug.py,sha256=m9Ap_dd5jd7W6FbE9_t5495SAEvUC1vnDZ2m-TYXsQ4,3394
-pip/_internal/commands/download.py,sha256=J6hxq7dO8Ld751WYueAdC18xiGhi27B6lrdOefFxwZo,5577
-pip/_internal/commands/freeze.py,sha256=G9I_yoBHlpWLX1qItsWNOmmqc8ET7pekFybdbV333d4,3464
-pip/_internal/commands/hash.py,sha256=47teimfAPhpkaVbSDaafck51BT3XXYuL83lAqc5lOcE,1735
-pip/_internal/commands/help.py,sha256=Nhecq--ydFn80Gm1Zvbf9943EcRJfO0TnXUhsF0RO7s,1181
-pip/_internal/commands/install.py,sha256=rt2NI4ZnOf-zBf0eTqGSyvPdq4q8oDLcfolYNmlPqDA,23803
-pip/_internal/commands/list.py,sha256=wQijUCcLQmME_ryAk1EqjBM4CNfF8FXt5XGC5o3lQjc,10548
-pip/_internal/commands/search.py,sha256=7Il8nKZ9mM7qF5jlnBoPvSIFY9f-0-5IbYoX3miTuZY,5148
-pip/_internal/commands/show.py,sha256=Vzsj2oX0JBl94MPyF3LV8YoMcigl8B2UsMM8zp0pH2s,6792
-pip/_internal/commands/uninstall.py,sha256=8mldFbrQecSoWDZRqxBgJkrlvx6Y9Iy7cs-2BIgtXt4,2983
-pip/_internal/commands/wheel.py,sha256=cDwoMefUjOdLqE29rQGYwvdzwvsV0j5Ac7pCszbqaK8,6408
-pip/_internal/configuration.py,sha256=ug70zCMs2OYEgef_6uebyXWDknbY-hz5cSaL9CpLYkI,14218
-pip/_internal/distributions/__init__.py,sha256=plnByCzRAZJL98kuchZ4PoRKxseDWM7wBfjvb2_fMw8,967
-pip/_internal/distributions/__pycache__/__init__.cpython-37.pyc,,
-pip/_internal/distributions/__pycache__/base.cpython-37.pyc,,
-pip/_internal/distributions/__pycache__/installed.cpython-37.pyc,,
-pip/_internal/distributions/__pycache__/wheel.cpython-37.pyc,,
-pip/_internal/distributions/base.py,sha256=91UqVXWnIyQPpIQed4nAuYWkGY665aYIV1EjS-CkBEg,1108
-pip/_internal/distributions/installed.py,sha256=4bZq8NqgXgPqX-yTz94WzTRvobjwDnxVnFCL24FjAWA,542
-pip/_internal/distributions/source/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pip/_internal/distributions/source/__pycache__/__init__.cpython-37.pyc,,
-pip/_internal/distributions/source/__pycache__/legacy.cpython-37.pyc,,
-pip/_internal/distributions/source/legacy.py,sha256=8QbAup0CBA5iPsSAJgxZyz1tQxO1C6hvfFpTlCJnyKE,3941
-pip/_internal/distributions/wheel.py,sha256=0hkgI9NpkT4bCwA9RTsX2NJcz9MJyPhlh3f7Z3529sY,616
-pip/_internal/download.py,sha256=X7XcEO2L5-VlcRkwDZ-y5GMCmmnt33yoD6AP3qCi-lI,20297
-pip/_internal/exceptions.py,sha256=6YRuwXAK6F1iyUWKIkCIpWWN2khkAn1sZOgrFA9S8Ro,10247
-pip/_internal/index.py,sha256=P5WnrP8zyBK29EFDCcIsznQquTNn4ZL-cYP8L-NbfPU,36887
-pip/_internal/legacy_resolve.py,sha256=_lQXUAZT6CeYeatartpS-5SfNYCSRc5Dsx1jbvCESGg,17129
-pip/_internal/locations.py,sha256=n6KALLEJN-L99QFHPNdJl3nMktwcpmqMyyo6EXZoCL0,5414
-pip/_internal/main.py,sha256=ikuDY1c82JZivOvnSXJZN31IeeZF_BkNpgZZ90p6iJ4,1359
-pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63
-pip/_internal/models/__pycache__/__init__.cpython-37.pyc,,
-pip/_internal/models/__pycache__/candidate.cpython-37.pyc,,
-pip/_internal/models/__pycache__/format_control.cpython-37.pyc,,
-pip/_internal/models/__pycache__/index.cpython-37.pyc,,
-pip/_internal/models/__pycache__/link.cpython-37.pyc,,
-pip/_internal/models/__pycache__/search_scope.cpython-37.pyc,,
-pip/_internal/models/__pycache__/selection_prefs.cpython-37.pyc,,
-pip/_internal/models/__pycache__/target_python.cpython-37.pyc,,
-pip/_internal/models/candidate.py,sha256=S5ipn5ZvnWFX_A84OGrtc4DAZTJM7GSnIyDIA8PwZWw,1277
-pip/_internal/models/format_control.py,sha256=bMcS0iTVLBXrYRDkGeGqYE3pTQUR96T7cGyMtLeH0Fs,2592
-pip/_internal/models/index.py,sha256=K59A8-hVhBM20Xkahr4dTwP7OjkJyEqXH11UwHFVgqM,1060
-pip/_internal/models/link.py,sha256=MFYBc2ko2B--zFyk6JBnorkAdFpJ8lwozz-64YBdDO0,6860
-pip/_internal/models/search_scope.py,sha256=25LTMmqHvKxCpOsEH1qcb0Id-hqATsg2SVIjcxY702o,3971
-pip/_internal/models/selection_prefs.py,sha256=rPeif2KKjhTPXeMoQYffjqh10oWpXhdkxRDaPT1HO8k,1908
-pip/_internal/models/target_python.py,sha256=d66ljdpZZtAAQsuOytiZ7yq6spCa8GOmz5Vf7uoVZT0,3820
-pip/_internal/network/__init__.py,sha256=jf6Tt5nV_7zkARBrKojIXItgejvoegVJVKUbhAa5Ioc,50
-pip/_internal/network/__pycache__/__init__.cpython-37.pyc,,
-pip/_internal/network/__pycache__/auth.cpython-37.pyc,,
-pip/_internal/network/__pycache__/cache.cpython-37.pyc,,
-pip/_internal/network/__pycache__/session.cpython-37.pyc,,
-pip/_internal/network/__pycache__/xmlrpc.cpython-37.pyc,,
-pip/_internal/network/auth.py,sha256=K3G1ukKb3PiH8w_UnpXTz8qQsTULO-qdbfOE9zTo1fE,11119
-pip/_internal/network/cache.py,sha256=WJafQhl9_gSxvEMvYN-ecsijGscKCVPsb09sEUbwL-E,2233
-pip/_internal/network/session.py,sha256=m2E-pkpE3nWVswt2JzKP9NANwfhf38SjDOUor0cxqd8,15842
-pip/_internal/network/xmlrpc.py,sha256=AL115M3vFJ8xiHVJneb8Hi0ZFeRvdPhblC89w25OG5s,1597
-pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pip/_internal/operations/__pycache__/__init__.cpython-37.pyc,,
-pip/_internal/operations/__pycache__/check.cpython-37.pyc,,
-pip/_internal/operations/__pycache__/freeze.cpython-37.pyc,,
-pip/_internal/operations/__pycache__/generate_metadata.cpython-37.pyc,,
-pip/_internal/operations/__pycache__/prepare.cpython-37.pyc,,
-pip/_internal/operations/check.py,sha256=SG2AuZ4NFwUPAWo4OQOotKCn6OUxjkAgBdehNAo_2sM,5354
-pip/_internal/operations/freeze.py,sha256=MeHazCBo6DF0B2ay0t0CqRRfR4bETqKB-ANv4yIq4Bg,9826
-pip/_internal/operations/generate_metadata.py,sha256=oHJtWo0QpOt5xt5xQTfOEVNOpaPy9E0lhMCIE6Upidw,4699
-pip/_internal/operations/prepare.py,sha256=_TNie4ORlITMh04dkEQcBVDFkcz5FW67VjUfeJDRUcM,11279
-pip/_internal/pep425tags.py,sha256=mCZA3yvHjZfQGnQti2sNfzrinhLmDp0o9Fwb826Edj4,15941
-pip/_internal/pyproject.py,sha256=98g1oHmpdqraOqAJuoCDr2XP3QQuN5oEuoD0fWDtVhE,6490
-pip/_internal/req/__init__.py,sha256=iR73_MGlD4N2Dg86t2kooz7okpZgpAuZUSROrR9KkkE,2467
-pip/_internal/req/__pycache__/__init__.cpython-37.pyc,,
-pip/_internal/req/__pycache__/constructors.cpython-37.pyc,,
-pip/_internal/req/__pycache__/req_file.cpython-37.pyc,,
-pip/_internal/req/__pycache__/req_install.cpython-37.pyc,,
-pip/_internal/req/__pycache__/req_set.cpython-37.pyc,,
-pip/_internal/req/__pycache__/req_tracker.cpython-37.pyc,,
-pip/_internal/req/__pycache__/req_uninstall.cpython-37.pyc,,
-pip/_internal/req/constructors.py,sha256=DcUoVBQabogW6FoTxo_7FApvYTBk5LmiTtLOSi9Wh7Y,14388
-pip/_internal/req/req_file.py,sha256=Ip4KV67FLLexYq9xVGgzjbyjIQCP5W-RmkVZWFWV5AY,14294
-pip/_internal/req/req_install.py,sha256=NYPFJ7UTN1YS1m2twuxfXKQfSQnEwUWadkvk3Ez8o_s,36573
-pip/_internal/req/req_set.py,sha256=ykotCTON9_be3yzrkXs9yv986Tv-p-Sk5-q8tqgk6BY,8132
-pip/_internal/req/req_tracker.py,sha256=M5VZxoKrvDmwH2G2kk1QthJmhF19zCmKSXLY-WDLKWY,3195
-pip/_internal/req/req_uninstall.py,sha256=VyukcnrMTRob977YO6C5t9CVadmZIF24Yj30Hfz4lpc,23590
-pip/_internal/self_outdated_check.py,sha256=0RuTI8jXNm_CK7ZV8huRUqtabE0SQgwJSogapEti_zo,7934
-pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pip/_internal/utils/__pycache__/__init__.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/appdirs.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/compat.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/deprecation.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/encoding.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/filesystem.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/filetypes.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/glibc.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/hashes.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/inject_securetransport.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/logging.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/marker_files.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/misc.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/models.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/packaging.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/setuptools_build.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/subprocess.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/temp_dir.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/typing.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/ui.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/unpacking.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/urls.cpython-37.pyc,,
-pip/_internal/utils/__pycache__/virtualenv.cpython-37.pyc,,
-pip/_internal/utils/appdirs.py,sha256=hchP3vBMefwp3Jr05IqN6cssVlbzYPTj062hJzBDhA4,9766
-pip/_internal/utils/compat.py,sha256=i9XoohQwMK73PwN0Nkan6DfiG47rdyllTXvX3WJO30c,9565
-pip/_internal/utils/deprecation.py,sha256=pBnNogoA4UGTxa_JDnPXBRRYpKMbExAhXpBwAwklOBs,3318
-pip/_internal/utils/encoding.py,sha256=hxZz0t3Whw3d4MHQEiofxalTlfKwxFdLc8fpeGfhKo8,1320
-pip/_internal/utils/filesystem.py,sha256=TG_hHd0BZkYih17f4z4wEi0XXheo5TE-UnAQB964Pf0,3334
-pip/_internal/utils/filetypes.py,sha256=R2FwzoeX7b-rZALOXx5cuO8VPPMhUQ4ne7wm3n3IcWA,571
-pip/_internal/utils/glibc.py,sha256=HXnQNSGfrldTYbF6V6asT86h6QQKPwumYok1MFyQDnM,4397
-pip/_internal/utils/hashes.py,sha256=XXj0SZfz4piN8XgqloGzPnfQYGTcSZZDtuqa1zWCts8,4020
-pip/_internal/utils/inject_securetransport.py,sha256=M17ZlFVY66ApgeASVjKKLKNz0LAfk-SyU0HZ4ZB6MmI,810
-pip/_internal/utils/logging.py,sha256=aJL7NldPhS5KGFof6Qt3o3MG5cjm5TOoo7bGRu9_wsg,13033
-pip/_internal/utils/marker_files.py,sha256=ktYfV9ccPKzVREiWlmTveiKOztk0L7F2zXi2ob2lymM,823
-pip/_internal/utils/misc.py,sha256=4klI9CWDMj5zJeg171uJIfqqU24MSEJqxGrMbe5xwdw,25249
-pip/_internal/utils/models.py,sha256=IA0hw_T4awQzui0kqfIEASm5yLtgZAB08ag59Nip5G8,1148
-pip/_internal/utils/packaging.py,sha256=VtiwcAAL7LBi7tGL2je7LeW4bE11KMHGCsJ1NZY5XtM,3035
-pip/_internal/utils/setuptools_build.py,sha256=vHAmalU_IcDvU7ioRioVspEyVD2N4_NPDSbtHOoow8g,1631
-pip/_internal/utils/subprocess.py,sha256=M3oBdbCSxDhmQSWYRZo0PTzGuyYvJaNIzt3DVCmtjJI,9911
-pip/_internal/utils/temp_dir.py,sha256=fFNkfrE3MlyjZjoLzeX9A2AkjUWEEEoP-8ObyX3BSnE,5521
-pip/_internal/utils/typing.py,sha256=bF73ImJzIaxLLEVwfEaSJzFGqV9LaxkQBvDULIyr1jI,1125
-pip/_internal/utils/ui.py,sha256=SmwDgg45shGo0wVJSK3MUZUQtM4DNGdQntE3n2G97yk,13906
-pip/_internal/utils/unpacking.py,sha256=M944JTSiapBOSKLWu7lbawpVHSE7flfzZTEr3TAG7v8,9438
-pip/_internal/utils/urls.py,sha256=aNV9wq5ClUmrz6sG-al7hEWJ4ToitOy7l82CmFGFNW8,1481
-pip/_internal/utils/virtualenv.py,sha256=oSTrUMQUqmuXcDvQZGwV65w-hlvhBAqyQiWRxLf8fN0,891
-pip/_internal/vcs/__init__.py,sha256=viJxJRqRE_mVScum85bgQIXAd6o0ozFt18VpC-qIJrM,617
-pip/_internal/vcs/__pycache__/__init__.cpython-37.pyc,,
-pip/_internal/vcs/__pycache__/bazaar.cpython-37.pyc,,
-pip/_internal/vcs/__pycache__/git.cpython-37.pyc,,
-pip/_internal/vcs/__pycache__/mercurial.cpython-37.pyc,,
-pip/_internal/vcs/__pycache__/subversion.cpython-37.pyc,,
-pip/_internal/vcs/__pycache__/versioncontrol.cpython-37.pyc,,
-pip/_internal/vcs/bazaar.py,sha256=84q1-kj1_nJ9AMzMu8RmMp-riRZu81M7K9kowcYgi3U,3957
-pip/_internal/vcs/git.py,sha256=3KFP0-GnYM8FX-Obwo3XjE4JW_YEl4HVERDSSsjf4Ck,13274
-pip/_internal/vcs/mercurial.py,sha256=2mg7BdYI_Fe00fF6omaNccFQLPHBsDBG5CAEzvqn5sA,5110
-pip/_internal/vcs/subversion.py,sha256=Fpwy71AmuqXnoKi6h1SrXRtPjEMn8fieuM1O4j01IBg,12292
-pip/_internal/vcs/versioncontrol.py,sha256=HiWHloaYAo4UWGxJGZhG0nO0PWS513H6ACiqT0z5Yh0,21374
-pip/_internal/wheel.py,sha256=emOltdF4VmpsSabfIBbtlCcozviIOnUwIdB0D3_VMrQ,43080
-pip/_vendor/__init__.py,sha256=gEJYEfJm7XGLslyjW3KBQyQxyTYxdvTEkRT5Bz28MDs,4657
-pip/_vendor/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/__pycache__/appdirs.cpython-37.pyc,,
-pip/_vendor/__pycache__/contextlib2.cpython-37.pyc,,
-pip/_vendor/__pycache__/distro.cpython-37.pyc,,
-pip/_vendor/__pycache__/ipaddress.cpython-37.pyc,,
-pip/_vendor/__pycache__/pyparsing.cpython-37.pyc,,
-pip/_vendor/__pycache__/retrying.cpython-37.pyc,,
-pip/_vendor/__pycache__/six.cpython-37.pyc,,
-pip/_vendor/appdirs.py,sha256=BENKsvcA08IpccD9345-rMrg3aXWFA1q6BFEglnHg6I,24547
-pip/_vendor/cachecontrol/__init__.py,sha256=6cRPchVqkAkeUtYTSW8qCetjSqJo-GxP-n4VMVDbvmc,302
-pip/_vendor/cachecontrol/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-37.pyc,,
-pip/_vendor/cachecontrol/__pycache__/adapter.cpython-37.pyc,,
-pip/_vendor/cachecontrol/__pycache__/cache.cpython-37.pyc,,
-pip/_vendor/cachecontrol/__pycache__/compat.cpython-37.pyc,,
-pip/_vendor/cachecontrol/__pycache__/controller.cpython-37.pyc,,
-pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-37.pyc,,
-pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-37.pyc,,
-pip/_vendor/cachecontrol/__pycache__/serialize.cpython-37.pyc,,
-pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-37.pyc,,
-pip/_vendor/cachecontrol/_cmd.py,sha256=URGE0KrA87QekCG3SGPatlSPT571dZTDjNa-ZXX3pDc,1295
-pip/_vendor/cachecontrol/adapter.py,sha256=eBGAtVNRZgtl_Kj5JV54miqL9YND-D0JZPahwY8kFtY,4863
-pip/_vendor/cachecontrol/cache.py,sha256=1fc4wJP8HYt1ycnJXeEw5pCpeBL2Cqxx6g9Fb0AYDWQ,805
-pip/_vendor/cachecontrol/caches/__init__.py,sha256=-gHNKYvaeD0kOk5M74eOrsSgIKUtC6i6GfbmugGweEo,86
-pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-37.pyc,,
-pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-37.pyc,,
-pip/_vendor/cachecontrol/caches/file_cache.py,sha256=nYVKsJtXh6gJXvdn1iWyrhxvkwpQrK-eKoMRzuiwkKk,4153
-pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=HxelMpNCo-dYr2fiJDwM3hhhRmxUYtB5tXm1GpAAT4Y,856
-pip/_vendor/cachecontrol/compat.py,sha256=kHNvMRdt6s_Xwqq_9qJmr9ou3wYMOMUMxPPcwNxT8Mc,695
-pip/_vendor/cachecontrol/controller.py,sha256=U7g-YwizQ2O5NRgK_MZreF1ntM4E49C3PuF3od-Vwz4,13698
-pip/_vendor/cachecontrol/filewrapper.py,sha256=vACKO8Llzu_ZWyjV1Fxn1MA4TGU60N5N3GSrAFdAY2Q,2533
-pip/_vendor/cachecontrol/heuristics.py,sha256=BFGHJ3yQcxvZizfo90LLZ04T_Z5XSCXvFotrp7Us0sc,4070
-pip/_vendor/cachecontrol/serialize.py,sha256=GebE34fgToyWwAsRPguh8hEPN6CqoG-5hRMXRsjVABQ,6954
-pip/_vendor/cachecontrol/wrapper.py,sha256=sfr9YHWx-5TwNz1H5rT6QOo8ggII6v3vbEDjQFwR6wc,671
-pip/_vendor/certifi/__init__.py,sha256=WFoavXHhpX-BZ5kbvyinZTbhLsqPJypLKIZu29nUsQg,52
-pip/_vendor/certifi/__main__.py,sha256=NaCn6WtWME-zzVWQ2j4zFyl8cY4knDa9CwtHNIeFPhM,53
-pip/_vendor/certifi/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/certifi/__pycache__/__main__.cpython-37.pyc,,
-pip/_vendor/certifi/__pycache__/core.cpython-37.pyc,,
-pip/_vendor/certifi/cacert.pem,sha256=cVC1b0T-OcQzgdcRql2yMxT7O08O6pcJHnuO9nbLLn0,278533
-pip/_vendor/certifi/core.py,sha256=EuFc2BsToG5O1-qsx4BSjQ1r1-7WRtH87b1WflZOWhI,218
-pip/_vendor/chardet/__init__.py,sha256=YsP5wQlsHJ2auF1RZJfypiSrCA7_bQiRm3ES_NI76-Y,1559
-pip/_vendor/chardet/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/big5freq.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/big5prober.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/chardistribution.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/charsetprober.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/compat.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/cp949prober.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/enums.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/escprober.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/escsm.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/eucjpprober.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/euckrfreq.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/euckrprober.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/euctwfreq.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/euctwprober.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/gb2312freq.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/gb2312prober.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/hebrewprober.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/jisfreq.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/jpcntx.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/langcyrillicmodel.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/langthaimodel.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/latin1prober.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/mbcssm.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/sjisprober.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/universaldetector.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/utf8prober.cpython-37.pyc,,
-pip/_vendor/chardet/__pycache__/version.cpython-37.pyc,,
-pip/_vendor/chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254
-pip/_vendor/chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757
-pip/_vendor/chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411
-pip/_vendor/chardet/charsetgroupprober.py,sha256=6bDu8YIiRuScX4ca9Igb0U69TA2PGXXDej6Cc4_9kO4,3787
-pip/_vendor/chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110
-pip/_vendor/chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-pip/_vendor/chardet/cli/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-37.pyc,,
-pip/_vendor/chardet/cli/chardetect.py,sha256=DI8dlV3FBD0c0XA_y3sQ78z754DUv1J8n34RtDjOXNw,2774
-pip/_vendor/chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590
-pip/_vendor/chardet/compat.py,sha256=PKTzHkSbtbHDqS9PyujMbX74q1a8mMpeQTDVsQhZMRw,1134
-pip/_vendor/chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855
-pip/_vendor/chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661
-pip/_vendor/chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950
-pip/_vendor/chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510
-pip/_vendor/chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749
-pip/_vendor/chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546
-pip/_vendor/chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748
-pip/_vendor/chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621
-pip/_vendor/chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747
-pip/_vendor/chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715
-pip/_vendor/chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754
-pip/_vendor/chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838
-pip/_vendor/chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777
-pip/_vendor/chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643
-pip/_vendor/chardet/langbulgarianmodel.py,sha256=1HqQS9Pbtnj1xQgxitJMvw8X6kKr5OockNCZWfEQrPE,12839
-pip/_vendor/chardet/langcyrillicmodel.py,sha256=LODajvsetH87yYDDQKA2CULXUH87tI223dhfjh9Zx9c,17948
-pip/_vendor/chardet/langgreekmodel.py,sha256=8YAW7bU8YwSJap0kIJSbPMw1BEqzGjWzqcqf0WgUKAA,12688
-pip/_vendor/chardet/langhebrewmodel.py,sha256=JSnqmE5E62tDLTPTvLpQsg5gOMO4PbdWRvV7Avkc0HA,11345
-pip/_vendor/chardet/langhungarianmodel.py,sha256=RhapYSG5l0ZaO-VV4Fan5sW0WRGQqhwBM61yx3yxyOA,12592
-pip/_vendor/chardet/langthaimodel.py,sha256=8l0173Gu_W6G8mxmQOTEF4ls2YdE7FxWf3QkSxEGXJQ,11290
-pip/_vendor/chardet/langturkishmodel.py,sha256=W22eRNJsqI6uWAfwXSKVWWnCerYqrI8dZQTm_M0lRFk,11102
-pip/_vendor/chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370
-pip/_vendor/chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413
-pip/_vendor/chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012
-pip/_vendor/chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481
-pip/_vendor/chardet/sbcharsetprober.py,sha256=LDSpCldDCFlYwUkGkwD2oFxLlPWIWXT09akH_2PiY74,5657
-pip/_vendor/chardet/sbcsgroupprober.py,sha256=1IprcCB_k1qfmnxGC6MBbxELlKqD3scW6S8YIwdeyXA,3546
-pip/_vendor/chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774
-pip/_vendor/chardet/universaldetector.py,sha256=qL0174lSZE442eB21nnktT9_VcAye07laFWUeUrjttY,12485
-pip/_vendor/chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766
-pip/_vendor/chardet/version.py,sha256=sp3B08mrDXB-pf3K9fqJ_zeDHOCLC8RrngQyDFap_7g,242
-pip/_vendor/colorama/__init__.py,sha256=lJdY6COz9uM_pXwuk9oLr0fp8H8q2RrUqN16GKabvq4,239
-pip/_vendor/colorama/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/colorama/__pycache__/ansi.cpython-37.pyc,,
-pip/_vendor/colorama/__pycache__/ansitowin32.cpython-37.pyc,,
-pip/_vendor/colorama/__pycache__/initialise.cpython-37.pyc,,
-pip/_vendor/colorama/__pycache__/win32.cpython-37.pyc,,
-pip/_vendor/colorama/__pycache__/winterm.cpython-37.pyc,,
-pip/_vendor/colorama/ansi.py,sha256=Fi0un-QLqRm-v7o_nKiOqyC8PapBJK7DLV_q9LKtTO0,2524
-pip/_vendor/colorama/ansitowin32.py,sha256=u8QaqdqS_xYSfNkPM1eRJLHz6JMWPodaJaP0mxgHCDc,10462
-pip/_vendor/colorama/initialise.py,sha256=PprovDNxMTrvoNHFcL2NZjpH2XzDc8BLxLxiErfUl4k,1915
-pip/_vendor/colorama/win32.py,sha256=bJ8Il9jwaBN5BJ8bmN6FoYZ1QYuMKv2j8fGrXh7TJjw,5404
-pip/_vendor/colorama/winterm.py,sha256=2y_2b7Zsv34feAsP67mLOVc-Bgq51mdYGo571VprlrM,6438
-pip/_vendor/contextlib2.py,sha256=5HjGflUzwWAUfcILhSmC2GqvoYdZZzFzVfIDztHigUs,16915
-pip/_vendor/distlib/__init__.py,sha256=SkHYPuEQNQF2a2Cr18rfZ-LQyDqwwizn8tJE4seXPgU,587
-pip/_vendor/distlib/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/distlib/__pycache__/compat.cpython-37.pyc,,
-pip/_vendor/distlib/__pycache__/database.cpython-37.pyc,,
-pip/_vendor/distlib/__pycache__/index.cpython-37.pyc,,
-pip/_vendor/distlib/__pycache__/locators.cpython-37.pyc,,
-pip/_vendor/distlib/__pycache__/manifest.cpython-37.pyc,,
-pip/_vendor/distlib/__pycache__/markers.cpython-37.pyc,,
-pip/_vendor/distlib/__pycache__/metadata.cpython-37.pyc,,
-pip/_vendor/distlib/__pycache__/resources.cpython-37.pyc,,
-pip/_vendor/distlib/__pycache__/scripts.cpython-37.pyc,,
-pip/_vendor/distlib/__pycache__/util.cpython-37.pyc,,
-pip/_vendor/distlib/__pycache__/version.cpython-37.pyc,,
-pip/_vendor/distlib/__pycache__/wheel.cpython-37.pyc,,
-pip/_vendor/distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274
-pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/distlib/_backport/__pycache__/misc.cpython-37.pyc,,
-pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-37.pyc,,
-pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-37.pyc,,
-pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-37.pyc,,
-pip/_vendor/distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971
-pip/_vendor/distlib/_backport/shutil.py,sha256=VW1t3uYqUjWZH7jV-6QiimLhnldoV5uIpH4EuiT1jfw,25647
-pip/_vendor/distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617
-pip/_vendor/distlib/_backport/sysconfig.py,sha256=JdJ9ztRy4Hc-b5-VS74x3nUtdEIVr_OBvMsIb8O2sjc,26964
-pip/_vendor/distlib/_backport/tarfile.py,sha256=Ihp7rXRcjbIKw8COm9wSePV9ARGXbSF9gGXAMn2Q-KU,92628
-pip/_vendor/distlib/compat.py,sha256=xdNZmqFN5HwF30HjRn5M415pcC2kgXRBXn767xS8v-M,41404
-pip/_vendor/distlib/database.py,sha256=-KJH63AJ7hqjLtGCwOTrionhKr2Vsytdwkjyo8UdEco,51029
-pip/_vendor/distlib/index.py,sha256=SXKzpQCERctxYDMp_OLee2f0J0e19ZhGdCIoMlUfUQM,21066
-pip/_vendor/distlib/locators.py,sha256=bqzEWP3Ad8UE3D1rmzW1pgzVTKkY4rDUA_EWIVYli54,51807
-pip/_vendor/distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811
-pip/_vendor/distlib/markers.py,sha256=6Ac3cCfFBERexiESWIOXmg-apIP8l2esafNSX3KMy-8,4387
-pip/_vendor/distlib/metadata.py,sha256=OhbCKmf5lswE8unWBopI1hj7tRpHp4ZbFvU4d6aAEMM,40234
-pip/_vendor/distlib/resources.py,sha256=2FGv0ZHF14KXjLIlL0R991lyQQGcewOS4mJ-5n-JVnc,10766
-pip/_vendor/distlib/scripts.py,sha256=W24OXnZUmgRX_XtDrVZdfc-Frf4X4_cybvhP87iR-QU,16290
-pip/_vendor/distlib/t32.exe,sha256=y8Yu3yao6zZrELYGIisxkhnQLOAOvpiXft8_Y9I8vyU,92672
-pip/_vendor/distlib/t64.exe,sha256=qt1MpKO2NLqU8t1lD1T0frfFm5zwHm3mz7pLvmJ2kMI,102912
-pip/_vendor/distlib/util.py,sha256=TvdqcwncBHaQbNw0jkXRvSZvt1fbdgE8HQW5wJwzvv4,59790
-pip/_vendor/distlib/version.py,sha256=_n7F6juvQGAcn769E_SHa7fOcf5ERlEVymJ_EjPRwGw,23391
-pip/_vendor/distlib/w32.exe,sha256=f98Etq_1giFgIQxrEh-sOAeO8qVtWqpDbGxdUucJ6pw,89088
-pip/_vendor/distlib/w64.exe,sha256=6Hs-Wn0vXBHA6Qd76IlalqYXqrN80DCPpdoeIQzPRms,99840
-pip/_vendor/distlib/wheel.py,sha256=2lviV6L4IvTP5DRkKE0HGpClvdoTJQHZJLfTQ6dfn2A,40437
-pip/_vendor/distro.py,sha256=X2So5kjrRKyMbQJ90Xgy93HU5eFtujCzKaYNeoy1k1c,43251
-pip/_vendor/html5lib/__init__.py,sha256=Ztrn7UvF-wIFAgRBBa0ML-Gu5AffH3BPX_INJx4SaBI,1162
-pip/_vendor/html5lib/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-37.pyc,,
-pip/_vendor/html5lib/__pycache__/_inputstream.cpython-37.pyc,,
-pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-37.pyc,,
-pip/_vendor/html5lib/__pycache__/_utils.cpython-37.pyc,,
-pip/_vendor/html5lib/__pycache__/constants.cpython-37.pyc,,
-pip/_vendor/html5lib/__pycache__/html5parser.cpython-37.pyc,,
-pip/_vendor/html5lib/__pycache__/serializer.cpython-37.pyc,,
-pip/_vendor/html5lib/_ihatexml.py,sha256=3LBtJMlzgwM8vpQiU1TvGmEEmNH72sV0yD8yS53y07A,16705
-pip/_vendor/html5lib/_inputstream.py,sha256=bPUWcAfJScK4xkjQQaG_HsI2BvEVbFvI0AsodDYPQj0,32552
-pip/_vendor/html5lib/_tokenizer.py,sha256=YAaOEBD6qc5ISq9Xt9Nif1OFgcybTTfMdwqBkZhpAq4,76580
-pip/_vendor/html5lib/_trie/__init__.py,sha256=8VR1bcgD2OpeS2XExpu5yBhP_Q1K-lwKbBKICBPf1kU,289
-pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-37.pyc,,
-pip/_vendor/html5lib/_trie/__pycache__/datrie.cpython-37.pyc,,
-pip/_vendor/html5lib/_trie/__pycache__/py.cpython-37.pyc,,
-pip/_vendor/html5lib/_trie/_base.py,sha256=CaybYyMro8uERQYjby2tTeSUatnWDfWroUN9N7ety5w,1013
-pip/_vendor/html5lib/_trie/datrie.py,sha256=EQpqSfkZRuTbE-DuhW7xMdVDxdZNZ0CfmnYfHA_3zxM,1178
-pip/_vendor/html5lib/_trie/py.py,sha256=wXmQLrZRf4MyWNyg0m3h81m9InhLR7GJ002mIIZh-8o,1775
-pip/_vendor/html5lib/_utils.py,sha256=ismpASeqa2jqEPQjHUj8vReAf7yIoKnvLN5fuOw6nv0,4015
-pip/_vendor/html5lib/constants.py,sha256=4lmZWLtEPRLnl8NzftOoYTJdo6jpeMtP6dqQC0g_bWQ,83518
-pip/_vendor/html5lib/filters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-37.pyc,,
-pip/_vendor/html5lib/filters/__pycache__/base.cpython-37.pyc,,
-pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-37.pyc,,
-pip/_vendor/html5lib/filters/__pycache__/lint.cpython-37.pyc,,
-pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-37.pyc,,
-pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-37.pyc,,
-pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-37.pyc,,
-pip/_vendor/html5lib/filters/alphabeticalattributes.py,sha256=lViZc2JMCclXi_5gduvmdzrRxtO5Xo9ONnbHBVCsykU,919
-pip/_vendor/html5lib/filters/base.py,sha256=z-IU9ZAYjpsVsqmVt7kuWC63jR11hDMr6CVrvuao8W0,286
-pip/_vendor/html5lib/filters/inject_meta_charset.py,sha256=egDXUEHXmAG9504xz0K6ALDgYkvUrC2q15YUVeNlVQg,2945
-pip/_vendor/html5lib/filters/lint.py,sha256=jk6q56xY0ojiYfvpdP-OZSm9eTqcAdRqhCoPItemPYA,3643
-pip/_vendor/html5lib/filters/optionaltags.py,sha256=8lWT75J0aBOHmPgfmqTHSfPpPMp01T84NKu0CRedxcE,10588
-pip/_vendor/html5lib/filters/sanitizer.py,sha256=4ON02KNjuqda1lCw5_JCUZxb0BzWR5M7ON84dtJ7dm0,26248
-pip/_vendor/html5lib/filters/whitespace.py,sha256=8eWqZxd4UC4zlFGW6iyY6f-2uuT8pOCSALc3IZt7_t4,1214
-pip/_vendor/html5lib/html5parser.py,sha256=g5g2ezkusHxhi7b23vK_-d6K6BfIJRbqIQmvQ9z4EgI,118963
-pip/_vendor/html5lib/serializer.py,sha256=yfcfBHse2wDs6ojxn-kieJjLT5s1ipilQJ0gL3-rJis,15758
-pip/_vendor/html5lib/treeadapters/__init__.py,sha256=A0rY5gXIe4bJOiSGRO_j_tFhngRBO8QZPzPtPw5dFzo,679
-pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-37.pyc,,
-pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-37.pyc,,
-pip/_vendor/html5lib/treeadapters/genshi.py,sha256=CH27pAsDKmu4ZGkAUrwty7u0KauGLCZRLPMzaO3M5vo,1715
-pip/_vendor/html5lib/treeadapters/sax.py,sha256=BKS8woQTnKiqeffHsxChUqL4q2ZR_wb5fc9MJ3zQC8s,1776
-pip/_vendor/html5lib/treebuilders/__init__.py,sha256=AysSJyvPfikCMMsTVvaxwkgDieELD5dfR8FJIAuq7hY,3592
-pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-37.pyc,,
-pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-37.pyc,,
-pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-37.pyc,,
-pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-37.pyc,,
-pip/_vendor/html5lib/treebuilders/base.py,sha256=wQGp5yy22TNG8tJ6aREe4UUeTR7A99dEz0BXVaedWb4,14579
-pip/_vendor/html5lib/treebuilders/dom.py,sha256=22whb0C71zXIsai5mamg6qzBEiigcBIvaDy4Asw3at0,8925
-pip/_vendor/html5lib/treebuilders/etree.py,sha256=aqIBOGj_dFYqBURIcTegGNBhAIJOw5iFDHb4jrkYH-8,12764
-pip/_vendor/html5lib/treebuilders/etree_lxml.py,sha256=9V0dXxbJYYq-Skgb5-_OL2NkVYpjioEb4CHajo0e9yI,14122
-pip/_vendor/html5lib/treewalkers/__init__.py,sha256=yhXxHpjlSqfQyUag3v8-vWjMPriFBU8YRAPNpDgBTn8,5714
-pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-37.pyc,,
-pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-37.pyc,,
-pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-37.pyc,,
-pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-37.pyc,,
-pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-37.pyc,,
-pip/_vendor/html5lib/treewalkers/base.py,sha256=ouiOsuSzvI0KgzdWP8PlxIaSNs9falhbiinAEc_UIJY,7476
-pip/_vendor/html5lib/treewalkers/dom.py,sha256=EHyFR8D8lYNnyDU9lx_IKigVJRyecUGua0mOi7HBukc,1413
-pip/_vendor/html5lib/treewalkers/etree.py,sha256=sz1o6mmE93NQ53qJFDO7HKyDtuwgK-Ay3qSFZPC6u00,4550
-pip/_vendor/html5lib/treewalkers/etree_lxml.py,sha256=sY6wfRshWTllu6n48TPWpKsQRPp-0CQrT0hj_AdzHSU,6309
-pip/_vendor/html5lib/treewalkers/genshi.py,sha256=4D2PECZ5n3ZN3qu3jMl9yY7B81jnQApBQSVlfaIuYbA,2309
-pip/_vendor/idna/__init__.py,sha256=9Nt7xpyet3DmOrPUGooDdAwmHZZu1qUAy2EaJ93kGiQ,58
-pip/_vendor/idna/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/idna/__pycache__/codec.cpython-37.pyc,,
-pip/_vendor/idna/__pycache__/compat.cpython-37.pyc,,
-pip/_vendor/idna/__pycache__/core.cpython-37.pyc,,
-pip/_vendor/idna/__pycache__/idnadata.cpython-37.pyc,,
-pip/_vendor/idna/__pycache__/intranges.cpython-37.pyc,,
-pip/_vendor/idna/__pycache__/package_data.cpython-37.pyc,,
-pip/_vendor/idna/__pycache__/uts46data.cpython-37.pyc,,
-pip/_vendor/idna/codec.py,sha256=lvYb7yu7PhAqFaAIAdWcwgaWI2UmgseUua-1c0AsG0A,3299
-pip/_vendor/idna/compat.py,sha256=R-h29D-6mrnJzbXxymrWUW7iZUvy-26TQwZ0ij57i4U,232
-pip/_vendor/idna/core.py,sha256=JDCZZ_PLESqIgEbU8mPyoEufWwoOiIqygA17-QZIe3s,11733
-pip/_vendor/idna/idnadata.py,sha256=HXaPFw6_YAJ0qppACPu0YLAULtRs3QovRM_CCZHGdY0,40899
-pip/_vendor/idna/intranges.py,sha256=TY1lpxZIQWEP6tNqjZkFA5hgoMWOj1OBmnUG8ihT87E,1749
-pip/_vendor/idna/package_data.py,sha256=kIzeKKXEouXLR4srqwf9Q3zv-NffKSOz5aSDOJARPB0,21
-pip/_vendor/idna/uts46data.py,sha256=oLyNZ1pBaiBlj9zFzLFRd_P7J8MkRcgDisjExZR_4MY,198292
-pip/_vendor/ipaddress.py,sha256=2OgbkeAD2rLkcXqbcvof3J5R7lRwjNLoBySyTkBtKnc,79852
-pip/_vendor/msgpack/__init__.py,sha256=LnKzG5v0RyZgs7KlY2-SZYDBn-toylovXxKiXR6C-IQ,1535
-pip/_vendor/msgpack/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/msgpack/__pycache__/_version.cpython-37.pyc,,
-pip/_vendor/msgpack/__pycache__/exceptions.cpython-37.pyc,,
-pip/_vendor/msgpack/__pycache__/fallback.cpython-37.pyc,,
-pip/_vendor/msgpack/_version.py,sha256=72BxB5FMl1q3Nz1hteHINzHhrFpXQ9nNtULaK52NLk8,20
-pip/_vendor/msgpack/exceptions.py,sha256=dCTWei8dpkrMsQDcjQk74ATl9HsIBH0ybt8zOPNqMYc,1081
-pip/_vendor/msgpack/fallback.py,sha256=vXo6S67Dmil9mz0PRBCLDu6znpv6CGKt9WPCEsdZx2A,37454
-pip/_vendor/packaging/__about__.py,sha256=CpuMSyh1V7adw8QMjWKkY3LtdqRUkRX4MgJ6nF4stM0,744
-pip/_vendor/packaging/__init__.py,sha256=6enbp5XgRfjBjsI9-bn00HjHf5TH21PDMOKkJW8xw-w,562
-pip/_vendor/packaging/__pycache__/__about__.cpython-37.pyc,,
-pip/_vendor/packaging/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/packaging/__pycache__/_compat.cpython-37.pyc,,
-pip/_vendor/packaging/__pycache__/_structures.cpython-37.pyc,,
-pip/_vendor/packaging/__pycache__/markers.cpython-37.pyc,,
-pip/_vendor/packaging/__pycache__/requirements.cpython-37.pyc,,
-pip/_vendor/packaging/__pycache__/specifiers.cpython-37.pyc,,
-pip/_vendor/packaging/__pycache__/tags.cpython-37.pyc,,
-pip/_vendor/packaging/__pycache__/utils.cpython-37.pyc,,
-pip/_vendor/packaging/__pycache__/version.cpython-37.pyc,,
-pip/_vendor/packaging/_compat.py,sha256=Ugdm-qcneSchW25JrtMIKgUxfEEBcCAz6WrEeXeqz9o,865
-pip/_vendor/packaging/_structures.py,sha256=pVd90XcXRGwpZRB_qdFuVEibhCHpX_bL5zYr9-N0mc8,1416
-pip/_vendor/packaging/markers.py,sha256=jRoHXMzT_7InY31pBB9Nkx66COZpQBAwa5scHla9uVQ,8250
-pip/_vendor/packaging/requirements.py,sha256=grcnFU8x7KD230JaFLXtWl3VClLuOmsOy4c-m55tOWs,4700
-pip/_vendor/packaging/specifiers.py,sha256=0ZzQpcUnvrQ6LjR-mQRLzMr8G6hdRv-mY0VSf_amFtI,27778
-pip/_vendor/packaging/tags.py,sha256=EPLXhO6GTD7_oiWEO1U0l0PkfR8R_xivpMDHXnsTlts,12933
-pip/_vendor/packaging/utils.py,sha256=VaTC0Ei7zO2xl9ARiWmz2YFLFt89PuuhLbAlXMyAGms,1520
-pip/_vendor/packaging/version.py,sha256=Npdwnb8OHedj_2L86yiUqscujb7w_i5gmSK1PhOAFzg,11978
-pip/_vendor/pep517/__init__.py,sha256=nCw8ZdLH4c19g8xP_Ndag1KPdQhlSDKaL9pg-X7uNWU,84
-pip/_vendor/pep517/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/pep517/__pycache__/_in_process.cpython-37.pyc,,
-pip/_vendor/pep517/__pycache__/build.cpython-37.pyc,,
-pip/_vendor/pep517/__pycache__/check.cpython-37.pyc,,
-pip/_vendor/pep517/__pycache__/colorlog.cpython-37.pyc,,
-pip/_vendor/pep517/__pycache__/compat.cpython-37.pyc,,
-pip/_vendor/pep517/__pycache__/dirtools.cpython-37.pyc,,
-pip/_vendor/pep517/__pycache__/envbuild.cpython-37.pyc,,
-pip/_vendor/pep517/__pycache__/meta.cpython-37.pyc,,
-pip/_vendor/pep517/__pycache__/wrappers.cpython-37.pyc,,
-pip/_vendor/pep517/_in_process.py,sha256=v1Viek27-MGCOFu8eSlLd2jGCrIqc1fISnutGFoRDps,7792
-pip/_vendor/pep517/build.py,sha256=WqM0-X4KyzY566qxGf3FeaYc1hw95H7YP0ElZ1zuTb0,3318
-pip/_vendor/pep517/check.py,sha256=ST02kRWBrRMOxgnRm9clw18Q2X7sJGaD4j3h6GmBhJ8,5949
-pip/_vendor/pep517/colorlog.py,sha256=Tk9AuYm_cLF3BKTBoSTJt9bRryn0aFojIQOwbfVUTxQ,4098
-pip/_vendor/pep517/compat.py,sha256=M-5s4VNp8rjyT76ZZ_ibnPD44DYVzSQlyCEHayjtDPw,780
-pip/_vendor/pep517/dirtools.py,sha256=2mkAkAL0mRz_elYFjRKuekTJVipH1zTn4tbf1EDev84,1129
-pip/_vendor/pep517/envbuild.py,sha256=K4dIGAbkXf3RoQX_9RFpZvMvPrVSHtcbH7o9VSrNnlM,6024
-pip/_vendor/pep517/meta.py,sha256=8mnM5lDnT4zXQpBTliJbRGfesH7iioHwozbDxALPS9Y,2463
-pip/_vendor/pep517/wrappers.py,sha256=QiQaEQlfCrhRpPBFQiGVM9QjrKSlj8AvM39haoyfPRk,10599
-pip/_vendor/pkg_resources/__init__.py,sha256=hnT0Ph4iK40Ycr-OzSii_wZW5f7HCkP79E6Vf4cR3Vg,108237
-pip/_vendor/pkg_resources/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-37.pyc,,
-pip/_vendor/pkg_resources/py31compat.py,sha256=CRk8fkiPRDLsbi5pZcKsHI__Pbmh_94L8mr9Qy9Ab2U,562
-pip/_vendor/progress/__init__.py,sha256=fcbQQXo5np2CoQyhSH5XprkicwLZNLePR3uIahznSO0,4857
-pip/_vendor/progress/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/progress/__pycache__/bar.cpython-37.pyc,,
-pip/_vendor/progress/__pycache__/counter.cpython-37.pyc,,
-pip/_vendor/progress/__pycache__/spinner.cpython-37.pyc,,
-pip/_vendor/progress/bar.py,sha256=QuDuVNcmXgpxtNtxO0Fq72xKigxABaVmxYGBw4J3Z_E,2854
-pip/_vendor/progress/counter.py,sha256=MznyBrvPWrOlGe4MZAlGUb9q3aODe6_aNYeAE_VNoYA,1372
-pip/_vendor/progress/spinner.py,sha256=k8JbDW94T0-WXuXfxZIFhdoNPYp3jfnpXqBnfRv5fGs,1380
-pip/_vendor/pyparsing.py,sha256=uJuFv_UoLmKOHlCU-EhIiKjYqYS2OCj-Gmr-sRkCzmU,263468
-pip/_vendor/pytoml/__init__.py,sha256=W_SKx36Hsew-Fty36BOpreLm4uF4V_Tgkm_z9rIoOE8,127
-pip/_vendor/pytoml/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/pytoml/__pycache__/core.cpython-37.pyc,,
-pip/_vendor/pytoml/__pycache__/parser.cpython-37.pyc,,
-pip/_vendor/pytoml/__pycache__/test.cpython-37.pyc,,
-pip/_vendor/pytoml/__pycache__/utils.cpython-37.pyc,,
-pip/_vendor/pytoml/__pycache__/writer.cpython-37.pyc,,
-pip/_vendor/pytoml/core.py,sha256=9CrLLTs1PdWjEwRnYzt_i4dhHcZvGxs_GsMlYAX3iY4,509
-pip/_vendor/pytoml/parser.py,sha256=qsc0NRnTgdFZgRp9gmr6D_KWFelrwxLkTj9dVxUcqS8,10309
-pip/_vendor/pytoml/test.py,sha256=2nQs4aX3XQEaaQCx6x_OJTS2Hb0_IiTZRqNOeDmLCzo,1021
-pip/_vendor/pytoml/utils.py,sha256=JCLHx77Hu1R3F-bRgiROIiKyCzLwyebnp5P35cRJxWs,1665
-pip/_vendor/pytoml/writer.py,sha256=4QQky9JSuRv60uzuhVZASU8T3CuobSkLG1285X6bDW8,3369
-pip/_vendor/requests/__init__.py,sha256=ONVsH6kJuPTV9nf-XVoubWsVX3qVtjCyju42kTW6Uug,4074
-pip/_vendor/requests/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/requests/__pycache__/__version__.cpython-37.pyc,,
-pip/_vendor/requests/__pycache__/_internal_utils.cpython-37.pyc,,
-pip/_vendor/requests/__pycache__/adapters.cpython-37.pyc,,
-pip/_vendor/requests/__pycache__/api.cpython-37.pyc,,
-pip/_vendor/requests/__pycache__/auth.cpython-37.pyc,,
-pip/_vendor/requests/__pycache__/certs.cpython-37.pyc,,
-pip/_vendor/requests/__pycache__/compat.cpython-37.pyc,,
-pip/_vendor/requests/__pycache__/cookies.cpython-37.pyc,,
-pip/_vendor/requests/__pycache__/exceptions.cpython-37.pyc,,
-pip/_vendor/requests/__pycache__/help.cpython-37.pyc,,
-pip/_vendor/requests/__pycache__/hooks.cpython-37.pyc,,
-pip/_vendor/requests/__pycache__/models.cpython-37.pyc,,
-pip/_vendor/requests/__pycache__/packages.cpython-37.pyc,,
-pip/_vendor/requests/__pycache__/sessions.cpython-37.pyc,,
-pip/_vendor/requests/__pycache__/status_codes.cpython-37.pyc,,
-pip/_vendor/requests/__pycache__/structures.cpython-37.pyc,,
-pip/_vendor/requests/__pycache__/utils.cpython-37.pyc,,
-pip/_vendor/requests/__version__.py,sha256=Bm-GFstQaFezsFlnmEMrJDe8JNROz9n2XXYtODdvjjc,436
-pip/_vendor/requests/_internal_utils.py,sha256=Zx3PnEUccyfsB-ie11nZVAW8qClJy0gx1qNME7rgT18,1096
-pip/_vendor/requests/adapters.py,sha256=e-bmKEApNVqFdylxuMJJfiaHdlmS_zhWhIMEzlHvGuc,21548
-pip/_vendor/requests/api.py,sha256=fbUo11QoLOoNgWU6FfvNz8vMj9bE_cMmICXBa7TZHJs,6271
-pip/_vendor/requests/auth.py,sha256=QB2-cSUj1jrvWZfPXttsZpyAacQgtKLVk14vQW9TpSE,10206
-pip/_vendor/requests/certs.py,sha256=nXRVq9DtGmv_1AYbwjTu9UrgAcdJv05ZvkNeaoLOZxY,465
-pip/_vendor/requests/compat.py,sha256=FZX4Q_EMKiMnhZpZ3g_gOsT-j2ca9ij2gehDx1cwYeo,1941
-pip/_vendor/requests/cookies.py,sha256=Y-bKX6TvW3FnYlE6Au0SXtVVWcaNdFvuAwQxw-G0iTI,18430
-pip/_vendor/requests/exceptions.py,sha256=-mLam3TAx80V09EaH3H-ZxR61eAVuLRZ8zgBBSLjK44,3197
-pip/_vendor/requests/help.py,sha256=SJPVcoXeo7KfK4AxJN5eFVQCjr0im87tU2n7ubLsksU,3578
-pip/_vendor/requests/hooks.py,sha256=QReGyy0bRcr5rkwCuObNakbYsc7EkiKeBwG4qHekr2Q,757
-pip/_vendor/requests/models.py,sha256=6s-37iAqXVptq8z7U_LoH_pbIPrCQUm_Z8QuIGE29Q0,34275
-pip/_vendor/requests/packages.py,sha256=njJmVifY4aSctuW3PP5EFRCxjEwMRDO6J_feG2dKWsI,695
-pip/_vendor/requests/sessions.py,sha256=DjbCotDW6xSAaBsjbW-L8l4N0UcwmrxVNgSrZgIjGWM,29332
-pip/_vendor/requests/status_codes.py,sha256=XWlcpBjbCtq9sSqpH9_KKxgnLTf9Z__wCWolq21ySlg,4129
-pip/_vendor/requests/structures.py,sha256=zoP8qly2Jak5e89HwpqjN1z2diztI-_gaqts1raJJBc,2981
-pip/_vendor/requests/utils.py,sha256=LtPJ1db6mJff2TJSJWKi7rBpzjPS3mSOrjC9zRhoD3A,30049
-pip/_vendor/retrying.py,sha256=k3fflf5_Mm0XcIJYhB7Tj34bqCCPhUDkYbx1NvW2FPE,9972
-pip/_vendor/six.py,sha256=h9jch2pS86y4R36pKRS3LOYUCVFNIJMRwjZ4fJDtJ44,32452
-pip/_vendor/urllib3/__init__.py,sha256=cedaGRiXnA8WzcI3WPbb9u2Al9l2ortwfXZSQ4yFHHU,2683
-pip/_vendor/urllib3/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/urllib3/__pycache__/_collections.cpython-37.pyc,,
-pip/_vendor/urllib3/__pycache__/connection.cpython-37.pyc,,
-pip/_vendor/urllib3/__pycache__/connectionpool.cpython-37.pyc,,
-pip/_vendor/urllib3/__pycache__/exceptions.cpython-37.pyc,,
-pip/_vendor/urllib3/__pycache__/fields.cpython-37.pyc,,
-pip/_vendor/urllib3/__pycache__/filepost.cpython-37.pyc,,
-pip/_vendor/urllib3/__pycache__/poolmanager.cpython-37.pyc,,
-pip/_vendor/urllib3/__pycache__/request.cpython-37.pyc,,
-pip/_vendor/urllib3/__pycache__/response.cpython-37.pyc,,
-pip/_vendor/urllib3/_collections.py,sha256=GouVsNzwg6jADZTmimMI6oqmwKSswnMo9dh5tGNVWO4,10792
-pip/_vendor/urllib3/connection.py,sha256=p7uUWh3cP0hgja8fFlzfZvwVxviJa8-C5cx9G3wQ5-o,15170
-pip/_vendor/urllib3/connectionpool.py,sha256=NYYFYX-L9XO4tQMU7ug5ABidsmGKVSE2_X4XMggzwxk,36446
-pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-37.pyc,,
-pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-37.pyc,,
-pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-37.pyc,,
-pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-37.pyc,,
-pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-37.pyc,,
-pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-37.pyc,,
-pip/_vendor/urllib3/contrib/_appengine_environ.py,sha256=tJvMXygi5UnFn4tmCHtrXOQIqy1FAfZoDDK36Q35F1I,707
-pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-37.pyc,,
-pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-37.pyc,,
-pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=ZuSEVZwiubk0oaVmkZa8bUoK9ACVJJhPVgRzPZN6KoQ,16805
-pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=V7GnujxnWZh2N2sMsV5N4d9Imymokkm3zBwgt77_bSE,11956
-pip/_vendor/urllib3/contrib/appengine.py,sha256=CosoKgcu5PE5COkPSGa7Q5AFzh9XWAf0PTBU7LSBE7A,11314
-pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=YnWc2-np7Rzi2LfCxJ2fEprhGkeZDSjQFdJuTQ5vuUE,4201
-pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=w35mWy_1POZUsbOhurVb_zhf0C1Jkd79AFlucLs6KuQ,16440
-pip/_vendor/urllib3/contrib/securetransport.py,sha256=_vByA8KzFPxn9x67hilAPu9wpoKDS9fU3K1DVavEk74,32742
-pip/_vendor/urllib3/contrib/socks.py,sha256=nzDMgDIFJWVubKHqvIn2-SKCO91hhJInP92WgHChGzA,7036
-pip/_vendor/urllib3/exceptions.py,sha256=_tiSmwYQ8em6OSr5GPO5hpbCG8q0DTIuJ2F7NMEyDyc,6610
-pip/_vendor/urllib3/fields.py,sha256=kroD76QK-GdHHW7f_AUN4XxDC3OQPI2FFrS9eSL4BCs,8553
-pip/_vendor/urllib3/filepost.py,sha256=vj0qbrpT1AFzvvW4SuC8M5kJiw7wftHcSr-7b8UpPpw,2440
-pip/_vendor/urllib3/packages/__init__.py,sha256=h4BLhD4tLaBx1adaDtKXfupsgqY0wWLXb_f1_yVlV6A,108
-pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/urllib3/packages/__pycache__/six.cpython-37.pyc,,
-pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-37.pyc,,
-pip/_vendor/urllib3/packages/backports/makefile.py,sha256=005wrvH-_pWSnTFqQ2sdzzh4zVCtQUUQ4mR2Yyxwc0A,1418
-pip/_vendor/urllib3/packages/six.py,sha256=adx4z-eM_D0Vvu0IIqVzFACQ_ux9l64y7DkSEfbxCDs,32536
-pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py,sha256=ywgKMtfHi1-DrXlzPfVAhzsLzzqcK7GT6eLgdode1Fg,688
-pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-37.pyc,,
-pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py,sha256=EFrDbzAs-pV-Fm1HeuAqLl2mJ6MS23LwxYiukKrtf9U,5724
-pip/_vendor/urllib3/poolmanager.py,sha256=JYUyBUN3IiEknUdjZ7VJrpCQr6SP7vi0WwSndrn8XpE,17053
-pip/_vendor/urllib3/request.py,sha256=hhoHvEEatyd9Tn5EbGjQ0emn-ENMCyY591yNWTneINA,6018
-pip/_vendor/urllib3/response.py,sha256=O2DVzBeWOzyxZDZ8k0EDFU3GW1jWXk_b03mS0O1ybxs,27836
-pip/_vendor/urllib3/util/__init__.py,sha256=bWNaav_OT-1L7-sxm59cGb59rDORlbhb_4noduM5m0U,1038
-pip/_vendor/urllib3/util/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/urllib3/util/__pycache__/connection.cpython-37.pyc,,
-pip/_vendor/urllib3/util/__pycache__/queue.cpython-37.pyc,,
-pip/_vendor/urllib3/util/__pycache__/request.cpython-37.pyc,,
-pip/_vendor/urllib3/util/__pycache__/response.cpython-37.pyc,,
-pip/_vendor/urllib3/util/__pycache__/retry.cpython-37.pyc,,
-pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-37.pyc,,
-pip/_vendor/urllib3/util/__pycache__/timeout.cpython-37.pyc,,
-pip/_vendor/urllib3/util/__pycache__/url.cpython-37.pyc,,
-pip/_vendor/urllib3/util/__pycache__/wait.cpython-37.pyc,,
-pip/_vendor/urllib3/util/connection.py,sha256=fOXAQ288KHaXmdTlDv5drwScDk9taQ9YzT42xegxzTg,4636
-pip/_vendor/urllib3/util/queue.py,sha256=myTX3JDHntglKQNBf3b6dasHH-uF-W59vzGSQiFdAfI,497
-pip/_vendor/urllib3/util/request.py,sha256=_LJPqQXTYA_9c0syijP8Bmj72BeKNO9PXBk62oM7HQY,3821
-pip/_vendor/urllib3/util/response.py,sha256=_WbTQr8xRQuJuY2rTIZxVdJD6mnEOtQupjaK_bF_Vj8,2573
-pip/_vendor/urllib3/util/retry.py,sha256=Ui74h44gLIIWkAxT9SK3A2mEvu55-odWgJMw3LiUNGk,15450
-pip/_vendor/urllib3/util/ssl_.py,sha256=7mB3AsidIqLLq6gbeBL-7Ta0MyVOL5uZax8_5bH3y7c,14163
-pip/_vendor/urllib3/util/timeout.py,sha256=2g39u7rU68ilOcGhP1sVzySm4yWDbiY1LxCjsrCrMk8,9874
-pip/_vendor/urllib3/util/url.py,sha256=0peUq73QOfzYLL89pb-93SEMFRtjvQCISXx61c7qlF8,14247
-pip/_vendor/urllib3/util/wait.py,sha256=k46KzqIYu3Vnzla5YW3EvtInNlU_QycFqQAghIOxoAg,5406
-pip/_vendor/webencodings/__init__.py,sha256=qOBJIuPy_4ByYH6W_bNgJF-qYQ2DoU-dKsDu5yRWCXg,10579
-pip/_vendor/webencodings/__pycache__/__init__.cpython-37.pyc,,
-pip/_vendor/webencodings/__pycache__/labels.cpython-37.pyc,,
-pip/_vendor/webencodings/__pycache__/mklabels.cpython-37.pyc,,
-pip/_vendor/webencodings/__pycache__/tests.cpython-37.pyc,,
-pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-37.pyc,,
-pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979
-pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305
-pip/_vendor/webencodings/tests.py,sha256=OtGLyjhNY1fvkW1GvLJ_FV9ZoqC9Anyjr7q3kxTbzNs,6563
-pip/_vendor/webencodings/x_user_defined.py,sha256=yOqWSdmpytGfUgh_Z6JYgDNhoc-BAHyyeeT15Fr42tM,4307
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/WHEEL b/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/WHEEL
deleted file mode 100644
index 8b701e9..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/WHEEL
+++ /dev/null
@@ -1,6 +0,0 @@
-Wheel-Version: 1.0
-Generator: bdist_wheel (0.33.6)
-Root-Is-Purelib: true
-Tag: py2-none-any
-Tag: py3-none-any
-
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/entry_points.txt b/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/entry_points.txt
deleted file mode 100644
index 4e3b05f..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/entry_points.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-[console_scripts]
-pip = pip._internal.main:main
-pip3 = pip._internal.main:main
-pip3.7 = pip._internal.main:main
-
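
The entry_points.txt removed above maps the pip, pip3 and pip3.7 console scripts to pip._internal.main:main. As a rough illustration only, an installer-generated wrapper for such an entry point tends to look like the sketch below; the exact file is installer-specific, and the import only resolves against a pip 19.x install.

    import re
    import sys

    from pip._internal.main import main

    if __name__ == "__main__":
        # Normalize argv[0] the way generated wrappers do (strip -script.py/.exe),
        # then hand control to the declared entry-point callable.
        sys.argv[0] = re.sub(r"(-script\.pyw?|\.exe)?$", "", sys.argv[0])
        sys.exit(main())
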
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/top_level.txt b/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/top_level.txt
deleted file mode 100644
index a1b589e..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip-19.3.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/__init__.py
deleted file mode 100644
index a24cb60..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-__version__ = "19.3"
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/__main__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/__main__.py
deleted file mode 100644
index 49b6fdf..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/__main__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from __future__ import absolute_import
-
-import os
-import sys
-
-# If we are running from a wheel, add the wheel to sys.path
-# This allows the usage python pip-*.whl/pip install pip-*.whl
-if __package__ == '':
-    # __file__ is pip-*.whl/pip/__main__.py
-    # first dirname call strips off '/__main__.py', second strips off '/pip'
-    # Resulting path is the name of the wheel itself
-    # Add that to sys.path so we can import pip
-    path = os.path.dirname(os.path.dirname(__file__))
-    sys.path.insert(0, path)
-
-from pip._internal.main import main as _main  # isort:skip # noqa
-
-if __name__ == '__main__':
-    sys.exit(_main())
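
The __main__.py deleted above exists so pip can be run straight from a wheel archive sitting on sys.path: stripping two path components from __file__ recovers the .whl itself. A minimal sketch of that path derivation, using a hypothetical wheel location:

    import os

    # Hypothetical __file__ value when the module lives inside a wheel on sys.path.
    module_file = "/tmp/pip-19.3-py2.py3-none-any.whl/pip/__main__.py"

    # The first dirname() drops '/__main__.py', the second drops '/pip',
    # leaving the wheel archive path that gets inserted into sys.path.
    wheel_path = os.path.dirname(os.path.dirname(module_file))
    print(wheel_path)  # -> /tmp/pip-19.3-py2.py3-none-any.whl
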
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/__init__.py
deleted file mode 100644
index 8c0e4c5..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/usr/bin/env python
-import pip._internal.utils.inject_securetransport  # noqa
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/build_env.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/build_env.py
deleted file mode 100644
index 5e6dc46..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/build_env.py
+++ /dev/null
@@ -1,221 +0,0 @@
-"""Build Environment used for isolation during sdist building
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
-
-import logging
-import os
-import sys
-import textwrap
-from collections import OrderedDict
-from distutils.sysconfig import get_python_lib
-from sysconfig import get_paths
-
-from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet
-
-from pip import __file__ as pip_location
-from pip._internal.utils.subprocess import call_subprocess
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.ui import open_spinner
-
-if MYPY_CHECK_RUNNING:
-    from typing import Tuple, Set, Iterable, Optional, List
-    from pip._internal.index import PackageFinder
-
-logger = logging.getLogger(__name__)
-
-
-class _Prefix:
-
-    def __init__(self, path):
-        # type: (str) -> None
-        self.path = path
-        self.setup = False
-        self.bin_dir = get_paths(
-            'nt' if os.name == 'nt' else 'posix_prefix',
-            vars={'base': path, 'platbase': path}
-        )['scripts']
-        # Note: prefer distutils' sysconfig to get the
-        # library paths so PyPy is correctly supported.
-        purelib = get_python_lib(plat_specific=False, prefix=path)
-        platlib = get_python_lib(plat_specific=True, prefix=path)
-        if purelib == platlib:
-            self.lib_dirs = [purelib]
-        else:
-            self.lib_dirs = [purelib, platlib]
-
-
-class BuildEnvironment(object):
-    """Creates and manages an isolated environment to install build deps
-    """
-
-    def __init__(self):
-        # type: () -> None
-        self._temp_dir = TempDirectory(kind="build-env")
-
-        self._prefixes = OrderedDict((
-            (name, _Prefix(os.path.join(self._temp_dir.path, name)))
-            for name in ('normal', 'overlay')
-        ))
-
-        self._bin_dirs = []  # type: List[str]
-        self._lib_dirs = []  # type: List[str]
-        for prefix in reversed(list(self._prefixes.values())):
-            self._bin_dirs.append(prefix.bin_dir)
-            self._lib_dirs.extend(prefix.lib_dirs)
-
-        # Customize site to:
-        # - ensure .pth files are honored
-        # - prevent access to system site packages
-        system_sites = {
-            os.path.normcase(site) for site in (
-                get_python_lib(plat_specific=False),
-                get_python_lib(plat_specific=True),
-            )
-        }
-        self._site_dir = os.path.join(self._temp_dir.path, 'site')
-        if not os.path.exists(self._site_dir):
-            os.mkdir(self._site_dir)
-        with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp:
-            fp.write(textwrap.dedent(
-                '''
-                import os, site, sys
-
-                # First, drop system-sites related paths.
-                original_sys_path = sys.path[:]
-                known_paths = set()
-                for path in {system_sites!r}:
-                    site.addsitedir(path, known_paths=known_paths)
-                system_paths = set(
-                    os.path.normcase(path)
-                    for path in sys.path[len(original_sys_path):]
-                )
-                original_sys_path = [
-                    path for path in original_sys_path
-                    if os.path.normcase(path) not in system_paths
-                ]
-                sys.path = original_sys_path
-
-                # Second, add lib directories.
-                # ensuring .pth file are processed.
-                for path in {lib_dirs!r}:
-                    assert not path in sys.path
-                    site.addsitedir(path)
-                '''
-            ).format(system_sites=system_sites, lib_dirs=self._lib_dirs))
-
-    def __enter__(self):
-        self._save_env = {
-            name: os.environ.get(name, None)
-            for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH')
-        }
-
-        path = self._bin_dirs[:]
-        old_path = self._save_env['PATH']
-        if old_path:
-            path.extend(old_path.split(os.pathsep))
-
-        pythonpath = [self._site_dir]
-
-        os.environ.update({
-            'PATH': os.pathsep.join(path),
-            'PYTHONNOUSERSITE': '1',
-            'PYTHONPATH': os.pathsep.join(pythonpath),
-        })
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        for varname, old_value in self._save_env.items():
-            if old_value is None:
-                os.environ.pop(varname, None)
-            else:
-                os.environ[varname] = old_value
-
-    def cleanup(self):
-        # type: () -> None
-        self._temp_dir.cleanup()
-
-    def check_requirements(self, reqs):
-        # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]]
-        """Return 2 sets:
-            - conflicting requirements: set of (installed, wanted) reqs tuples
-            - missing requirements: set of reqs
-        """
-        missing = set()
-        conflicting = set()
-        if reqs:
-            ws = WorkingSet(self._lib_dirs)
-            for req in reqs:
-                try:
-                    if ws.find(Requirement.parse(req)) is None:
-                        missing.add(req)
-                except VersionConflict as e:
-                    conflicting.add((str(e.args[0].as_requirement()),
-                                     str(e.args[1])))
-        return conflicting, missing
-
-    def install_requirements(
-        self,
-        finder,  # type: PackageFinder
-        requirements,  # type: Iterable[str]
-        prefix_as_string,  # type: str
-        message  # type: Optional[str]
-    ):
-        # type: (...) -> None
-        prefix = self._prefixes[prefix_as_string]
-        assert not prefix.setup
-        prefix.setup = True
-        if not requirements:
-            return
-        args = [
-            sys.executable, os.path.dirname(pip_location), 'install',
-            '--ignore-installed', '--no-user', '--prefix', prefix.path,
-            '--no-warn-script-location',
-        ]  # type: List[str]
-        if logger.getEffectiveLevel() <= logging.DEBUG:
-            args.append('-v')
-        for format_control in ('no_binary', 'only_binary'):
-            formats = getattr(finder.format_control, format_control)
-            args.extend(('--' + format_control.replace('_', '-'),
-                         ','.join(sorted(formats or {':none:'}))))
-
-        index_urls = finder.index_urls
-        if index_urls:
-            args.extend(['-i', index_urls[0]])
-            for extra_index in index_urls[1:]:
-                args.extend(['--extra-index-url', extra_index])
-        else:
-            args.append('--no-index')
-        for link in finder.find_links:
-            args.extend(['--find-links', link])
-
-        for host in finder.trusted_hosts:
-            args.extend(['--trusted-host', host])
-        if finder.allow_all_prereleases:
-            args.append('--pre')
-        args.append('--')
-        args.extend(requirements)
-        with open_spinner(message) as spinner:
-            call_subprocess(args, spinner=spinner)
-
-
-class NoOpBuildEnvironment(BuildEnvironment):
-    """A no-op drop-in replacement for BuildEnvironment
-    """
-
-    def __init__(self):
-        pass
-
-    def __enter__(self):
-        pass
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        pass
-
-    def cleanup(self):
-        pass
-
-    def install_requirements(self, finder, requirements, prefix, message):
-        raise NotImplementedError()
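
BuildEnvironment.__enter__ and __exit__ above isolate builds by temporarily rewriting PATH, PYTHONPATH and PYTHONNOUSERSITE and then restoring the previous values on exit. A standalone sketch of that save-and-restore pattern follows; the helper name and the override value are illustrative, not pip API.

    import os
    from contextlib import contextmanager

    @contextmanager
    def temporary_environ(**overrides):
        # Remember the previous value (or absence) of each variable we touch.
        saved = {name: os.environ.get(name) for name in overrides}
        os.environ.update(overrides)
        try:
            yield
        finally:
            # Restore old values; drop variables that did not exist before.
            for name, old_value in saved.items():
                if old_value is None:
                    os.environ.pop(name, None)
                else:
                    os.environ[name] = old_value

    # Illustrative use: subprocesses started inside the block see the override.
    with temporary_environ(PYTHONNOUSERSITE="1"):
        pass
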
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cache.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cache.py
deleted file mode 100644
index 8ebecbb..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cache.py
+++ /dev/null
@@ -1,253 +0,0 @@
-"""Cache Management
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-
-import errno
-import hashlib
-import logging
-import os
-
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.models.link import Link
-from pip._internal.utils.compat import expanduser
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.urls import path_to_url
-from pip._internal.wheel import InvalidWheelFilename, Wheel
-
-if MYPY_CHECK_RUNNING:
-    from typing import Optional, Set, List, Any
-    from pip._internal.index import FormatControl
-    from pip._internal.pep425tags import Pep425Tag
-
-logger = logging.getLogger(__name__)
-
-
-class Cache(object):
-    """An abstract class - provides cache directories for data from links
-
-
-        :param cache_dir: The root of the cache.
-        :param format_control: An object of FormatControl class to limit
-            binaries being read from the cache.
-        :param allowed_formats: which formats of files the cache should store.
-            ('binary' and 'source' are the only allowed values)
-    """
-
-    def __init__(self, cache_dir, format_control, allowed_formats):
-        # type: (str, FormatControl, Set[str]) -> None
-        super(Cache, self).__init__()
-        self.cache_dir = expanduser(cache_dir) if cache_dir else None
-        self.format_control = format_control
-        self.allowed_formats = allowed_formats
-
-        _valid_formats = {"source", "binary"}
-        assert self.allowed_formats.union(_valid_formats) == _valid_formats
-
-    def _get_cache_path_parts(self, link):
-        # type: (Link) -> List[str]
-        """Get parts of part that must be os.path.joined with cache_dir
-        """
-
-        # We want to generate an url to use as our cache key, we don't want to
-        # just re-use the URL because it might have other items in the fragment
-        # and we don't care about those.
-        key_parts = [link.url_without_fragment]
-        if link.hash_name is not None and link.hash is not None:
-            key_parts.append("=".join([link.hash_name, link.hash]))
-        key_url = "#".join(key_parts)
-
-        # Encode our key url with sha224, we'll use this because it has similar
-        # security properties to sha256, but with a shorter total output (and
-        # thus less secure). However the differences don't make a lot of
-        # difference for our use case here.
-        hashed = hashlib.sha224(key_url.encode()).hexdigest()
-
-        # We want to nest the directories some to prevent having a ton of top
-        # level directories where we might run out of sub directories on some
-        # FS.
-        parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
-
-        return parts
-
-    def _get_candidates(self, link, package_name):
-        # type: (Link, Optional[str]) -> List[Any]
-        can_not_cache = (
-            not self.cache_dir or
-            not package_name or
-            not link
-        )
-        if can_not_cache:
-            return []
-
-        canonical_name = canonicalize_name(package_name)
-        formats = self.format_control.get_allowed_formats(
-            canonical_name
-        )
-        if not self.allowed_formats.intersection(formats):
-            return []
-
-        root = self.get_path_for_link(link)
-        try:
-            return os.listdir(root)
-        except OSError as err:
-            if err.errno in {errno.ENOENT, errno.ENOTDIR}:
-                return []
-            raise
-
-    def get_path_for_link(self, link):
-        # type: (Link) -> str
-        """Return a directory to store cached items in for link.
-        """
-        raise NotImplementedError()
-
-    def get(
-        self,
-        link,            # type: Link
-        package_name,    # type: Optional[str]
-        supported_tags,  # type: List[Pep425Tag]
-    ):
-        # type: (...) -> Link
-        """Returns a link to a cached item if it exists, otherwise returns the
-        passed link.
-        """
-        raise NotImplementedError()
-
-    def _link_for_candidate(self, link, candidate):
-        # type: (Link, str) -> Link
-        root = self.get_path_for_link(link)
-        path = os.path.join(root, candidate)
-
-        return Link(path_to_url(path))
-
-    def cleanup(self):
-        # type: () -> None
-        pass
-
-
-class SimpleWheelCache(Cache):
-    """A cache of wheels for future installs.
-    """
-
-    def __init__(self, cache_dir, format_control):
-        # type: (str, FormatControl) -> None
-        super(SimpleWheelCache, self).__init__(
-            cache_dir, format_control, {"binary"}
-        )
-
-    def get_path_for_link(self, link):
-        # type: (Link) -> str
-        """Return a directory to store cached wheels for link
-
-        Because there are M wheels for any one sdist, we provide a directory
-        to cache them in, and then consult that directory when looking up
-        cache hits.
-
-        We only insert things into the cache if they have plausible version
-        numbers, so that we don't contaminate the cache with things that were
-        not unique. E.g. ./package might have dozens of installs done for it
-        and build a version of 0.0...and if we built and cached a wheel, we'd
-        end up using the same wheel even if the source has been edited.
-
-        :param link: The link of the sdist for which this will cache wheels.
-        """
-        parts = self._get_cache_path_parts(link)
-
-        # Store wheels within the root cache_dir
-        return os.path.join(self.cache_dir, "wheels", *parts)
-
-    def get(
-        self,
-        link,            # type: Link
-        package_name,    # type: Optional[str]
-        supported_tags,  # type: List[Pep425Tag]
-    ):
-        # type: (...) -> Link
-        candidates = []
-
-        for wheel_name in self._get_candidates(link, package_name):
-            try:
-                wheel = Wheel(wheel_name)
-            except InvalidWheelFilename:
-                continue
-            if not wheel.supported(supported_tags):
-                # Built for a different python/arch/etc
-                continue
-            candidates.append(
-                (wheel.support_index_min(supported_tags), wheel_name)
-            )
-
-        if not candidates:
-            return link
-
-        return self._link_for_candidate(link, min(candidates)[1])
-
-
-class EphemWheelCache(SimpleWheelCache):
-    """A SimpleWheelCache that creates it's own temporary cache directory
-    """
-
-    def __init__(self, format_control):
-        # type: (FormatControl) -> None
-        self._temp_dir = TempDirectory(kind="ephem-wheel-cache")
-
-        super(EphemWheelCache, self).__init__(
-            self._temp_dir.path, format_control
-        )
-
-    def cleanup(self):
-        # type: () -> None
-        self._temp_dir.cleanup()
-
-
-class WheelCache(Cache):
-    """Wraps EphemWheelCache and SimpleWheelCache into a single Cache
-
-    This Cache allows for graceful degradation, using the ephem wheel cache
-    when a certain link is not found in the simple wheel cache first.
-    """
-
-    def __init__(self, cache_dir, format_control):
-        # type: (str, FormatControl) -> None
-        super(WheelCache, self).__init__(
-            cache_dir, format_control, {'binary'}
-        )
-        self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
-        self._ephem_cache = EphemWheelCache(format_control)
-
-    def get_path_for_link(self, link):
-        # type: (Link) -> str
-        return self._wheel_cache.get_path_for_link(link)
-
-    def get_ephem_path_for_link(self, link):
-        # type: (Link) -> str
-        return self._ephem_cache.get_path_for_link(link)
-
-    def get(
-        self,
-        link,            # type: Link
-        package_name,    # type: Optional[str]
-        supported_tags,  # type: List[Pep425Tag]
-    ):
-        # type: (...) -> Link
-        retval = self._wheel_cache.get(
-            link=link,
-            package_name=package_name,
-            supported_tags=supported_tags,
-        )
-        if retval is not link:
-            return retval
-
-        return self._ephem_cache.get(
-            link=link,
-            package_name=package_name,
-            supported_tags=supported_tags,
-        )
-
-    def cleanup(self):
-        # type: () -> None
-        self._wheel_cache.cleanup()
-        self._ephem_cache.cleanup()
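
The wheel cache above keys each entry on a SHA-224 digest of the link URL (plus an optional hash fragment) and nests the digest as 2/2/2/remainder directory components so no single directory grows too large. A small sketch of that layout; the example URL is hypothetical.

    import hashlib
    import os

    def cache_path_parts(url, hash_name=None, hash_value=None):
        # Build the cache key as described in _get_cache_path_parts():
        # the fragment-free URL, optionally followed by '#<name>=<hash>'.
        key_parts = [url]
        if hash_name is not None and hash_value is not None:
            key_parts.append("=".join([hash_name, hash_value]))
        key_url = "#".join(key_parts)
        hashed = hashlib.sha224(key_url.encode()).hexdigest()
        return [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

    parts = cache_path_parts("https://example.com/packages/demo-1.0.tar.gz")
    print(os.path.join("wheels", *parts))  # e.g. wheels/xx/yy/zz/<rest-of-digest>
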
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/__init__.py
deleted file mode 100644
index e589bb9..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-"""Subpackage containing all of pip's command line interface related code
-"""
-
-# This file intentionally does not import submodules
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/autocompletion.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/autocompletion.py
deleted file mode 100644
index 5440241..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/autocompletion.py
+++ /dev/null
@@ -1,155 +0,0 @@
-"""Logic that powers autocompletion installed by ``pip completion``.
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-import optparse
-import os
-import sys
-
-from pip._internal.cli.main_parser import create_main_parser
-from pip._internal.commands import commands_dict, create_command
-from pip._internal.utils.misc import get_installed_distributions
-
-
-def autocomplete():
-    """Entry Point for completion of main and subcommand options.
-    """
-    # Don't complete if user hasn't sourced bash_completion file.
-    if 'PIP_AUTO_COMPLETE' not in os.environ:
-        return
-    cwords = os.environ['COMP_WORDS'].split()[1:]
-    cword = int(os.environ['COMP_CWORD'])
-    try:
-        current = cwords[cword - 1]
-    except IndexError:
-        current = ''
-
-    subcommands = list(commands_dict)
-    options = []
-    # subcommand
-    try:
-        subcommand_name = [w for w in cwords if w in subcommands][0]
-    except IndexError:
-        subcommand_name = None
-
-    parser = create_main_parser()
-    # subcommand options
-    if subcommand_name:
-        # special case: 'help' subcommand has no options
-        if subcommand_name == 'help':
-            sys.exit(1)
-        # special case: list locally installed dists for show and uninstall
-        should_list_installed = (
-            subcommand_name in ['show', 'uninstall'] and
-            not current.startswith('-')
-        )
-        if should_list_installed:
-            installed = []
-            lc = current.lower()
-            for dist in get_installed_distributions(local_only=True):
-                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
-                    installed.append(dist.key)
-            # if there are no dists installed, fall back to option completion
-            if installed:
-                for dist in installed:
-                    print(dist)
-                sys.exit(1)
-
-        subcommand = create_command(subcommand_name)
-
-        for opt in subcommand.parser.option_list_all:
-            if opt.help != optparse.SUPPRESS_HELP:
-                for opt_str in opt._long_opts + opt._short_opts:
-                    options.append((opt_str, opt.nargs))
-
-        # filter out previously specified options from available options
-        prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
-        options = [(x, v) for (x, v) in options if x not in prev_opts]
-        # filter options by current input
-        options = [(k, v) for k, v in options if k.startswith(current)]
-        # get completion type given cwords and available subcommand options
-        completion_type = get_path_completion_type(
-            cwords, cword, subcommand.parser.option_list_all,
-        )
-        # get completion files and directories if ``completion_type`` is
-        # ``<file>``, ``<path>`` or ``<dir>``
-        if completion_type:
-            options = auto_complete_paths(current, completion_type)
-            options = ((opt, 0) for opt in options)
-        for option in options:
-            opt_label = option[0]
-            # append '=' to options which require args
-            if option[1] and option[0][:2] == "--":
-                opt_label += '='
-            print(opt_label)
-    else:
-        # show main parser options only when necessary
-
-        opts = [i.option_list for i in parser.option_groups]
-        opts.append(parser.option_list)
-        opts = (o for it in opts for o in it)
-        if current.startswith('-'):
-            for opt in opts:
-                if opt.help != optparse.SUPPRESS_HELP:
-                    subcommands += opt._long_opts + opt._short_opts
-        else:
-            # get completion type given cwords and all available options
-            completion_type = get_path_completion_type(cwords, cword, opts)
-            if completion_type:
-                subcommands = auto_complete_paths(current, completion_type)
-
-        print(' '.join([x for x in subcommands if x.startswith(current)]))
-    sys.exit(1)
-
-
-def get_path_completion_type(cwords, cword, opts):
-    """Get the type of path completion (``file``, ``dir``, ``path`` or None)
-
-    :param cwords: same as the environment variable ``COMP_WORDS``
-    :param cword: same as the environment variable ``COMP_CWORD``
-    :param opts: The available options to check
-    :return: path completion type (``file``, ``dir``, ``path`` or None)
-    """
-    if cword < 2 or not cwords[cword - 2].startswith('-'):
-        return
-    for opt in opts:
-        if opt.help == optparse.SUPPRESS_HELP:
-            continue
-        for o in str(opt).split('/'):
-            if cwords[cword - 2].split('=')[0] == o:
-                if not opt.metavar or any(
-                        x in ('path', 'file', 'dir')
-                        for x in opt.metavar.split('/')):
-                    return opt.metavar
-
-
-def auto_complete_paths(current, completion_type):
-    """If ``completion_type`` is ``file`` or ``path``, list all regular files
-    and directories starting with ``current``; otherwise only list directories
-    starting with ``current``.
-
-    :param current: The word to be completed
-    :param completion_type: path completion type (``file``, ``path`` or ``dir``)
-    :return: A generator of regular files and/or directories
-    """
-    directory, filename = os.path.split(current)
-    current_path = os.path.abspath(directory)
-    # Don't complete paths if they can't be accessed
-    if not os.access(current_path, os.R_OK):
-        return
-    filename = os.path.normcase(filename)
-    # list all files that start with ``filename``
-    file_list = (x for x in os.listdir(current_path)
-                 if os.path.normcase(x).startswith(filename))
-    for f in file_list:
-        opt = os.path.join(current_path, f)
-        comp_file = os.path.normcase(os.path.join(directory, f))
-        # complete regular files when there is not ``<dir>`` after option
-        # complete directories when there is ``<file>``, ``<path>`` or
-        # ``<dir>`` after option
-        if completion_type != 'dir' and os.path.isfile(opt):
-            yield comp_file
-        elif os.path.isdir(opt):
-            yield os.path.join(comp_file, '')
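
autocomplete() above is driven entirely by environment variables that pip's generated shell completion function exports before re-invoking pip: PIP_AUTO_COMPLETE=1, the partial command line in COMP_WORDS, and the index of the word being completed in COMP_CWORD. A rough demonstration of that protocol, assuming a pip executable is on PATH:

    import os
    import subprocess

    env = dict(
        os.environ,
        PIP_AUTO_COMPLETE="1",   # tells pip to emit completions and exit
        COMP_WORDS="pip ins",    # the partial command line typed so far
        COMP_CWORD="1",          # completing the first word after 'pip'
    )
    result = subprocess.run(["pip"], env=env, capture_output=True, text=True)
    print(result.stdout.strip())  # expected to include 'install'
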
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/base_command.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/base_command.py
deleted file mode 100644
index dad08c2..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/base_command.py
+++ /dev/null
@@ -1,193 +0,0 @@
-"""Base Command class, and related routines"""
-
-from __future__ import absolute_import, print_function
-
-import logging
-import logging.config
-import optparse
-import os
-import platform
-import sys
-import traceback
-
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.command_context import CommandContextMixIn
-from pip._internal.cli.parser import (
-    ConfigOptionParser,
-    UpdatingDefaultsHelpFormatter,
-)
-from pip._internal.cli.status_codes import (
-    ERROR,
-    PREVIOUS_BUILD_DIR_ERROR,
-    SUCCESS,
-    UNKNOWN_ERROR,
-    VIRTUALENV_NOT_FOUND,
-)
-from pip._internal.exceptions import (
-    BadCommand,
-    CommandError,
-    InstallationError,
-    PreviousBuildDirError,
-    UninstallationError,
-)
-from pip._internal.utils.deprecation import deprecated
-from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
-from pip._internal.utils.misc import get_prog
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.virtualenv import running_under_virtualenv
-
-if MYPY_CHECK_RUNNING:
-    from typing import List, Tuple, Any
-    from optparse import Values
-
-__all__ = ['Command']
-
-logger = logging.getLogger(__name__)
-
-
-class Command(CommandContextMixIn):
-    usage = None  # type: str
-    ignore_require_venv = False  # type: bool
-
-    def __init__(self, name, summary, isolated=False):
-        # type: (str, str, bool) -> None
-        super(Command, self).__init__()
-        parser_kw = {
-            'usage': self.usage,
-            'prog': '%s %s' % (get_prog(), name),
-            'formatter': UpdatingDefaultsHelpFormatter(),
-            'add_help_option': False,
-            'name': name,
-            'description': self.__doc__,
-            'isolated': isolated,
-        }
-
-        self.name = name
-        self.summary = summary
-        self.parser = ConfigOptionParser(**parser_kw)
-
-        # Commands should add options to this option group
-        optgroup_name = '%s Options' % self.name.capitalize()
-        self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
-
-        # Add the general options
-        gen_opts = cmdoptions.make_option_group(
-            cmdoptions.general_group,
-            self.parser,
-        )
-        self.parser.add_option_group(gen_opts)
-
-    def handle_pip_version_check(self, options):
-        # type: (Values) -> None
-        """
-        This is a no-op so that commands by default do not do the pip version
-        check.
-        """
-        # Make sure we do the pip version check if the index_group options
-        # are present.
-        assert not hasattr(options, 'no_index')
-
-    def run(self, options, args):
-        # type: (Values, List[Any]) -> Any
-        raise NotImplementedError
-
-    def parse_args(self, args):
-        # type: (List[str]) -> Tuple
-        # factored out for testability
-        return self.parser.parse_args(args)
-
-    def main(self, args):
-        # type: (List[str]) -> int
-        try:
-            with self.main_context():
-                return self._main(args)
-        finally:
-            logging.shutdown()
-
-    def _main(self, args):
-        # type: (List[str]) -> int
-        options, args = self.parse_args(args)
-
-        # Set verbosity so that it can be used elsewhere.
-        self.verbosity = options.verbose - options.quiet
-
-        level_number = setup_logging(
-            verbosity=self.verbosity,
-            no_color=options.no_color,
-            user_log_file=options.log,
-        )
-
-        if sys.version_info[:2] == (2, 7):
-            message = (
-                "A future version of pip will drop support for Python 2.7. "
-                "More details about Python 2 support in pip, can be found at "
-                "https://pip.pypa.io/en/latest/development/release-process/#python-2-support"  # noqa
-            )
-            if platform.python_implementation() == "CPython":
-                message = (
-                    "Python 2.7 will reach the end of its life on January "
-                    "1st, 2020. Please upgrade your Python as Python 2.7 "
-                    "won't be maintained after that date. "
-                ) + message
-            deprecated(message, replacement=None, gone_in=None)
-
-        # TODO: Try to get these passing down from the command?
-        #       without resorting to os.environ to hold these.
-        #       This also affects isolated builds and it should.
-
-        if options.no_input:
-            os.environ['PIP_NO_INPUT'] = '1'
-
-        if options.exists_action:
-            os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)
-
-        if options.require_venv and not self.ignore_require_venv:
-            # If a venv is required check if it can really be found
-            if not running_under_virtualenv():
-                logger.critical(
-                    'Could not find an activated virtualenv (required).'
-                )
-                sys.exit(VIRTUALENV_NOT_FOUND)
-
-        try:
-            status = self.run(options, args)
-            # FIXME: all commands should return an exit status
-            # and when it is done, isinstance is not needed anymore
-            if isinstance(status, int):
-                return status
-        except PreviousBuildDirError as exc:
-            logger.critical(str(exc))
-            logger.debug('Exception information:', exc_info=True)
-
-            return PREVIOUS_BUILD_DIR_ERROR
-        except (InstallationError, UninstallationError, BadCommand) as exc:
-            logger.critical(str(exc))
-            logger.debug('Exception information:', exc_info=True)
-
-            return ERROR
-        except CommandError as exc:
-            logger.critical('%s', exc)
-            logger.debug('Exception information:', exc_info=True)
-
-            return ERROR
-        except BrokenStdoutLoggingError:
-            # Bypass our logger and write any remaining messages to stderr
-            # because stdout no longer works.
-            print('ERROR: Pipe to stdout was broken', file=sys.stderr)
-            if level_number <= logging.DEBUG:
-                traceback.print_exc(file=sys.stderr)
-
-            return ERROR
-        except KeyboardInterrupt:
-            logger.critical('Operation cancelled by user')
-            logger.debug('Exception information:', exc_info=True)
-
-            return ERROR
-        except BaseException:
-            logger.critical('Exception:', exc_info=True)
-
-            return UNKNOWN_ERROR
-        finally:
-            self.handle_pip_version_check(options)
-
-        return SUCCESS
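
Command._main() above funnels every failure mode into a small set of integer exit statuses so the process always terminates with a meaningful code. A stripped-down sketch of that exception-to-status mapping, not pip's actual API:

    import sys
    import traceback

    SUCCESS, ERROR, UNKNOWN_ERROR = 0, 1, 2

    def run_with_status(fn):
        # Mirror the shape of Command._main(): a run may return an int status,
        # anything else counts as SUCCESS; anticipated failures map to ERROR
        # and unexpected ones to UNKNOWN_ERROR.
        try:
            status = fn()
            return status if isinstance(status, int) else SUCCESS
        except KeyboardInterrupt:
            print("Operation cancelled by user", file=sys.stderr)
            return ERROR
        except Exception:
            traceback.print_exc()
            return UNKNOWN_ERROR

    if __name__ == "__main__":
        sys.exit(run_with_status(lambda: 0))
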
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/cmdoptions.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/cmdoptions.py
deleted file mode 100644
index d7c6e34..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/cmdoptions.py
+++ /dev/null
@@ -1,909 +0,0 @@
-"""
-shared options and groups
-
-The principle here is to define options once, but *not* instantiate them
-globally. One reason is that options with action='append' can carry state
-between parses. pip parses general options twice internally, and shouldn't
-pass on state. To be consistent, all options will follow this design.
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import logging
-import textwrap
-import warnings
-from distutils.util import strtobool
-from functools import partial
-from optparse import SUPPRESS_HELP, Option, OptionGroup
-from textwrap import dedent
-
-from pip._internal.exceptions import CommandError
-from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
-from pip._internal.models.format_control import FormatControl
-from pip._internal.models.index import PyPI
-from pip._internal.models.target_python import TargetPython
-from pip._internal.utils.hashes import STRONG_HASHES
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.ui import BAR_TYPES
-
-if MYPY_CHECK_RUNNING:
-    from typing import Any, Callable, Dict, Optional, Tuple
-    from optparse import OptionParser, Values
-    from pip._internal.cli.parser import ConfigOptionParser
-
-logger = logging.getLogger(__name__)
-
-
-def raise_option_error(parser, option, msg):
-    """
-    Raise an option parsing error using parser.error().
-
-    Args:
-      parser: an OptionParser instance.
-      option: an Option instance.
-      msg: the error text.
-    """
-    msg = '{} error: {}'.format(option, msg)
-    msg = textwrap.fill(' '.join(msg.split()))
-    parser.error(msg)
-
-
-def make_option_group(group, parser):
-    # type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup
-    """
-    Return an OptionGroup object
-    group  -- assumed to be dict with 'name' and 'options' keys
-    parser -- an optparse Parser
-    """
-    option_group = OptionGroup(parser, group['name'])
-    for option in group['options']:
-        option_group.add_option(option())
-    return option_group
-
-
-def check_install_build_global(options, check_options=None):
-    # type: (Values, Optional[Values]) -> None
-    """Disable wheels if per-setup.py call options are set.
-
-    :param options: The OptionParser options to update.
-    :param check_options: The options to check, if not supplied defaults to
-        options.
-    """
-    if check_options is None:
-        check_options = options
-
-    def getname(n):
-        return getattr(check_options, n, None)
-    names = ["build_options", "global_options", "install_options"]
-    if any(map(getname, names)):
-        control = options.format_control
-        control.disallow_binaries()
-        warnings.warn(
-            'Disabling all use of wheels due to the use of --build-options '
-            '/ --global-options / --install-options.', stacklevel=2,
-        )
-
-
-def check_dist_restriction(options, check_target=False):
-    # type: (Values, bool) -> None
-    """Function for determining if custom platform options are allowed.
-
-    :param options: The OptionParser options.
-    :param check_target: Whether or not to check if --target is being used.
-    """
-    dist_restriction_set = any([
-        options.python_version,
-        options.platform,
-        options.abi,
-        options.implementation,
-    ])
-
-    binary_only = FormatControl(set(), {':all:'})
-    sdist_dependencies_allowed = (
-        options.format_control != binary_only and
-        not options.ignore_dependencies
-    )
-
-    # Installations or downloads using dist restrictions must not combine
-    # source distributions and dist-specific wheels, as they are not
-    # guaranteed to be locally compatible.
-    if dist_restriction_set and sdist_dependencies_allowed:
-        raise CommandError(
-            "When restricting platform and interpreter constraints using "
-            "--python-version, --platform, --abi, or --implementation, "
-            "either --no-deps must be set, or --only-binary=:all: must be "
-            "set and --no-binary must not be set (or must be set to "
-            ":none:)."
-        )
-
-    if check_target:
-        if dist_restriction_set and not options.target_dir:
-            raise CommandError(
-                "Can not use any platform or abi specific options unless "
-                "installing via '--target'"
-            )
-
-
-###########
-# options #
-###########
-
-help_ = partial(
-    Option,
-    '-h', '--help',
-    dest='help',
-    action='help',
-    help='Show help.',
-)  # type: Callable[..., Option]
-
-isolated_mode = partial(
-    Option,
-    "--isolated",
-    dest="isolated_mode",
-    action="store_true",
-    default=False,
-    help=(
-        "Run pip in an isolated mode, ignoring environment variables and user "
-        "configuration."
-    ),
-)  # type: Callable[..., Option]
-
-require_virtualenv = partial(
-    Option,
-    # Run only if inside a virtualenv, bail if not.
-    '--require-virtualenv', '--require-venv',
-    dest='require_venv',
-    action='store_true',
-    default=False,
-    help=SUPPRESS_HELP
-)  # type: Callable[..., Option]
-
-verbose = partial(
-    Option,
-    '-v', '--verbose',
-    dest='verbose',
-    action='count',
-    default=0,
-    help='Give more output. Option is additive, and can be used up to 3 times.'
-)  # type: Callable[..., Option]
-
-no_color = partial(
-    Option,
-    '--no-color',
-    dest='no_color',
-    action='store_true',
-    default=False,
-    help="Suppress colored output",
-)  # type: Callable[..., Option]
-
-version = partial(
-    Option,
-    '-V', '--version',
-    dest='version',
-    action='store_true',
-    help='Show version and exit.',
-)  # type: Callable[..., Option]
-
-quiet = partial(
-    Option,
-    '-q', '--quiet',
-    dest='quiet',
-    action='count',
-    default=0,
-    help=(
-        'Give less output. Option is additive, and can be used up to 3'
-        ' times (corresponding to WARNING, ERROR, and CRITICAL logging'
-        ' levels).'
-    ),
-)  # type: Callable[..., Option]
-
-progress_bar = partial(
-    Option,
-    '--progress-bar',
-    dest='progress_bar',
-    type='choice',
-    choices=list(BAR_TYPES.keys()),
-    default='on',
-    help=(
-        'Specify type of progress to be displayed [' +
-        '|'.join(BAR_TYPES.keys()) + '] (default: %default)'
-    ),
-)  # type: Callable[..., Option]
-
-log = partial(
-    Option,
-    "--log", "--log-file", "--local-log",
-    dest="log",
-    metavar="path",
-    help="Path to a verbose appending log."
-)  # type: Callable[..., Option]
-
-no_input = partial(
-    Option,
-    # Don't ask for input
-    '--no-input',
-    dest='no_input',
-    action='store_true',
-    default=False,
-    help=SUPPRESS_HELP
-)  # type: Callable[..., Option]
-
-proxy = partial(
-    Option,
-    '--proxy',
-    dest='proxy',
-    type='str',
-    default='',
-    help="Specify a proxy in the form [user:passwd@]proxy.server:port."
-)  # type: Callable[..., Option]
-
-retries = partial(
-    Option,
-    '--retries',
-    dest='retries',
-    type='int',
-    default=5,
-    help="Maximum number of retries each connection should attempt "
-         "(default %default times).",
-)  # type: Callable[..., Option]
-
-timeout = partial(
-    Option,
-    '--timeout', '--default-timeout',
-    metavar='sec',
-    dest='timeout',
-    type='float',
-    default=15,
-    help='Set the socket timeout (default %default seconds).',
-)  # type: Callable[..., Option]
-
-skip_requirements_regex = partial(
-    Option,
-    # A regex to be used to skip requirements
-    '--skip-requirements-regex',
-    dest='skip_requirements_regex',
-    type='str',
-    default='',
-    help=SUPPRESS_HELP,
-)  # type: Callable[..., Option]
-
-
-def exists_action():
-    # type: () -> Option
-    return Option(
-        # Option when path already exist
-        '--exists-action',
-        dest='exists_action',
-        type='choice',
-        choices=['s', 'i', 'w', 'b', 'a'],
-        default=[],
-        action='append',
-        metavar='action',
-        help="Default action when a path already exists: "
-             "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
-    )
-
-
-cert = partial(
-    Option,
-    '--cert',
-    dest='cert',
-    type='str',
-    metavar='path',
-    help="Path to alternate CA bundle.",
-)  # type: Callable[..., Option]
-
-client_cert = partial(
-    Option,
-    '--client-cert',
-    dest='client_cert',
-    type='str',
-    default=None,
-    metavar='path',
-    help="Path to SSL client certificate, a single file containing the "
-         "private key and the certificate in PEM format.",
-)  # type: Callable[..., Option]
-
-index_url = partial(
-    Option,
-    '-i', '--index-url', '--pypi-url',
-    dest='index_url',
-    metavar='URL',
-    default=PyPI.simple_url,
-    help="Base URL of the Python Package Index (default %default). "
-         "This should point to a repository compliant with PEP 503 "
-         "(the simple repository API) or a local directory laid out "
-         "in the same format.",
-)  # type: Callable[..., Option]
-
-
-def extra_index_url():
-    return Option(
-        '--extra-index-url',
-        dest='extra_index_urls',
-        metavar='URL',
-        action='append',
-        default=[],
-        help="Extra URLs of package indexes to use in addition to "
-             "--index-url. Should follow the same rules as "
-             "--index-url.",
-    )
-
-
-no_index = partial(
-    Option,
-    '--no-index',
-    dest='no_index',
-    action='store_true',
-    default=False,
-    help='Ignore package index (only looking at --find-links URLs instead).',
-)  # type: Callable[..., Option]
-
-
-def find_links():
-    # type: () -> Option
-    return Option(
-        '-f', '--find-links',
-        dest='find_links',
-        action='append',
-        default=[],
-        metavar='url',
-        help="If a url or path to an html file, then parse for links to "
-             "archives. If a local path or file:// url that's a directory, "
-             "then look for archives in the directory listing.",
-    )
-
-
-def trusted_host():
-    # type: () -> Option
-    return Option(
-        "--trusted-host",
-        dest="trusted_hosts",
-        action="append",
-        metavar="HOSTNAME",
-        default=[],
-        help="Mark this host or host:port pair as trusted, even though it "
-             "does not have valid or any HTTPS.",
-    )
-
-
-def constraints():
-    # type: () -> Option
-    return Option(
-        '-c', '--constraint',
-        dest='constraints',
-        action='append',
-        default=[],
-        metavar='file',
-        help='Constrain versions using the given constraints file. '
-        'This option can be used multiple times.'
-    )
-
-
-def requirements():
-    # type: () -> Option
-    return Option(
-        '-r', '--requirement',
-        dest='requirements',
-        action='append',
-        default=[],
-        metavar='file',
-        help='Install from the given requirements file. '
-        'This option can be used multiple times.'
-    )
-
-
-def editable():
-    # type: () -> Option
-    return Option(
-        '-e', '--editable',
-        dest='editables',
-        action='append',
-        default=[],
-        metavar='path/url',
-        help=('Install a project in editable mode (i.e. setuptools '
-              '"develop mode") from a local project path or a VCS url.'),
-    )
-
-
-src = partial(
-    Option,
-    '--src', '--source', '--source-dir', '--source-directory',
-    dest='src_dir',
-    metavar='dir',
-    default=get_src_prefix(),
-    help='Directory to check out editable projects into. '
-    'The default in a virtualenv is "<venv path>/src". '
-    'The default for global installs is "<current dir>/src".'
-)  # type: Callable[..., Option]
-
-
-def _get_format_control(values, option):
-    # type: (Values, Option) -> Any
-    """Get a format_control object."""
-    return getattr(values, option.dest)
-
-
-def _handle_no_binary(option, opt_str, value, parser):
-    # type: (Option, str, str, OptionParser) -> None
-    existing = _get_format_control(parser.values, option)
-    FormatControl.handle_mutual_excludes(
-        value, existing.no_binary, existing.only_binary,
-    )
-
-
-def _handle_only_binary(option, opt_str, value, parser):
-    # type: (Option, str, str, OptionParser) -> None
-    existing = _get_format_control(parser.values, option)
-    FormatControl.handle_mutual_excludes(
-        value, existing.only_binary, existing.no_binary,
-    )
-
-
-def no_binary():
-    # type: () -> Option
-    format_control = FormatControl(set(), set())
-    return Option(
-        "--no-binary", dest="format_control", action="callback",
-        callback=_handle_no_binary, type="str",
-        default=format_control,
-        help="Do not use binary packages. Can be supplied multiple times, and "
-             "each time adds to the existing value. Accepts either :all: to "
-             "disable all binary packages, :none: to empty the set, or one or "
-             "more package names with commas between them (no colons). Note "
-             "that some packages are tricky to compile and may fail to "
-             "install when this option is used on them.",
-    )
-
-
-def only_binary():
-    # type: () -> Option
-    format_control = FormatControl(set(), set())
-    return Option(
-        "--only-binary", dest="format_control", action="callback",
-        callback=_handle_only_binary, type="str",
-        default=format_control,
-        help="Do not use source packages. Can be supplied multiple times, and "
-             "each time adds to the existing value. Accepts either :all: to "
-             "disable all source packages, :none: to empty the set, or one or "
-             "more package names with commas between them. Packages without "
-             "binary distributions will fail to install when this option is "
-             "used on them.",
-    )
-
-
-platform = partial(
-    Option,
-    '--platform',
-    dest='platform',
-    metavar='platform',
-    default=None,
-    help=("Only use wheels compatible with . "
-          "Defaults to the platform of the running system."),
-)  # type: Callable[..., Option]
-
-
-# This was made a separate function for unit-testing purposes.
-def _convert_python_version(value):
-    # type: (str) -> Tuple[Tuple[int, ...], Optional[str]]
-    """
-    Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.
-
-    :return: A 2-tuple (version_info, error_msg), where `error_msg` is
-        non-None if and only if there was a parsing error.
-    """
-    if not value:
-        # The empty string is the same as not providing a value.
-        return (None, None)
-
-    parts = value.split('.')
-    if len(parts) > 3:
-        return ((), 'at most three version parts are allowed')
-
-    if len(parts) == 1:
-        # Then we are in the case of "3" or "37".
-        value = parts[0]
-        if len(value) > 1:
-            parts = [value[0], value[1:]]
-
-    try:
-        version_info = tuple(int(part) for part in parts)
-    except ValueError:
-        return ((), 'each version part must be an integer')
-
-    return (version_info, None)
-
-
-def _handle_python_version(option, opt_str, value, parser):
-    # type: (Option, str, str, OptionParser) -> None
-    """
-    Handle a provided --python-version value.
-    """
-    version_info, error_msg = _convert_python_version(value)
-    if error_msg is not None:
-        msg = (
-            'invalid --python-version value: {!r}: {}'.format(
-                value, error_msg,
-            )
-        )
-        raise_option_error(parser, option=option, msg=msg)
-
-    parser.values.python_version = version_info
-
-
-python_version = partial(
-    Option,
-    '--python-version',
-    dest='python_version',
-    metavar='python_version',
-    action='callback',
-    callback=_handle_python_version, type='str',
-    default=None,
-    help=dedent("""\
-    The Python interpreter version to use for wheel and "Requires-Python"
-    compatibility checks. Defaults to a version derived from the running
-    interpreter. The version can be specified using up to three dot-separated
-    integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
-    version can also be given as a string without dots (e.g. "37" for 3.7.0).
-    """),
-)  # type: Callable[..., Option]
-
-
-implementation = partial(
-    Option,
-    '--implementation',
-    dest='implementation',
-    metavar='implementation',
-    default=None,
-    help=("Only use wheels compatible with Python "
-          "implementation , e.g. 'pp', 'jy', 'cp', "
-          " or 'ip'. If not specified, then the current "
-          "interpreter implementation is used.  Use 'py' to force "
-          "implementation-agnostic wheels."),
-)  # type: Callable[..., Option]
-
-
-abi = partial(
-    Option,
-    '--abi',
-    dest='abi',
-    metavar='abi',
-    default=None,
-    help=("Only use wheels compatible with Python "
-          "abi , e.g. 'pypy_41'.  If not specified, then the "
-          "current interpreter abi tag is used.  Generally "
-          "you will need to specify --implementation, "
-          "--platform, and --python-version when using "
-          "this option."),
-)  # type: Callable[..., Option]
-
-
-def add_target_python_options(cmd_opts):
-    # type: (OptionGroup) -> None
-    cmd_opts.add_option(platform())
-    cmd_opts.add_option(python_version())
-    cmd_opts.add_option(implementation())
-    cmd_opts.add_option(abi())
-
-
-def make_target_python(options):
-    # type: (Values) -> TargetPython
-    target_python = TargetPython(
-        platform=options.platform,
-        py_version_info=options.python_version,
-        abi=options.abi,
-        implementation=options.implementation,
-    )
-
-    return target_python
-
-
-def prefer_binary():
-    # type: () -> Option
-    return Option(
-        "--prefer-binary",
-        dest="prefer_binary",
-        action="store_true",
-        default=False,
-        help="Prefer older binary packages over newer source packages."
-    )
-
-
-cache_dir = partial(
-    Option,
-    "--cache-dir",
-    dest="cache_dir",
-    default=USER_CACHE_DIR,
-    metavar="dir",
-    help="Store the cache data in ."
-)  # type: Callable[..., Option]
-
-
-def _handle_no_cache_dir(option, opt, value, parser):
-    # type: (Option, str, str, OptionParser) -> None
-    """
-    Process a value provided for the --no-cache-dir option.
-
-    This is an optparse.Option callback for the --no-cache-dir option.
-    """
-    # The value argument will be None if --no-cache-dir is passed via the
-    # command-line, since the option doesn't accept arguments.  However,
-    # the value can be non-None if the option is triggered e.g. by an
-    # environment variable, like PIP_NO_CACHE_DIR=true.
-    if value is not None:
-        # Then parse the string value to get argument error-checking.
-        try:
-            strtobool(value)
-        except ValueError as exc:
-            raise_option_error(parser, option=option, msg=str(exc))
-
-    # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
-    # converted to 0 (like "false" or "no") caused cache_dir to be disabled
-    # rather than enabled (logic would say the latter).  Thus, we disable
-    # the cache directory not just on values that parse to True, but (for
-    # backwards compatibility reasons) also on values that parse to False.
-    # In other words, always set it to False if the option is provided in
-    # some (valid) form.
-    parser.values.cache_dir = False
-
-
-no_cache = partial(
-    Option,
-    "--no-cache-dir",
-    dest="cache_dir",
-    action="callback",
-    callback=_handle_no_cache_dir,
-    help="Disable the cache.",
-)  # type: Callable[..., Option]
-
-no_deps = partial(
-    Option,
-    '--no-deps', '--no-dependencies',
-    dest='ignore_dependencies',
-    action='store_true',
-    default=False,
-    help="Don't install package dependencies.",
-)  # type: Callable[..., Option]
-
-build_dir = partial(
-    Option,
-    '-b', '--build', '--build-dir', '--build-directory',
-    dest='build_dir',
-    metavar='dir',
-    help='Directory to unpack packages into and build in. Note that '
-         'an initial build still takes place in a temporary directory. '
-         'The location of temporary directories can be controlled by setting '
-         'the TMPDIR environment variable (TEMP on Windows) appropriately. '
-         'When passed, build directories are not cleaned in case of failures.'
-)  # type: Callable[..., Option]
-
-ignore_requires_python = partial(
-    Option,
-    '--ignore-requires-python',
-    dest='ignore_requires_python',
-    action='store_true',
-    help='Ignore the Requires-Python information.'
-)  # type: Callable[..., Option]
-
-no_build_isolation = partial(
-    Option,
-    '--no-build-isolation',
-    dest='build_isolation',
-    action='store_false',
-    default=True,
-    help='Disable isolation when building a modern source distribution. '
-         'Build dependencies specified by PEP 518 must be already installed '
-         'if this option is used.'
-)  # type: Callable[..., Option]
-
-
-def _handle_no_use_pep517(option, opt, value, parser):
-    # type: (Option, str, str, OptionParser) -> None
-    """
-    Process a value provided for the --no-use-pep517 option.
-
-    This is an optparse.Option callback for the no_use_pep517 option.
-    """
-    # Since --no-use-pep517 doesn't accept arguments, the value argument
-    # will be None if --no-use-pep517 is passed via the command-line.
-    # However, the value can be non-None if the option is triggered e.g.
-    # by an environment variable, for example "PIP_NO_USE_PEP517=true".
-    if value is not None:
-        msg = """A value was passed for --no-use-pep517,
-        probably using either the PIP_NO_USE_PEP517 environment variable
-        or the "no-use-pep517" config file option. Use an appropriate value
-        of the PIP_USE_PEP517 environment variable or the "use-pep517"
-        config file option instead.
-        """
-        raise_option_error(parser, option=option, msg=msg)
-
-    # Otherwise, --no-use-pep517 was passed via the command-line.
-    parser.values.use_pep517 = False
-
-
-use_pep517 = partial(
-    Option,
-    '--use-pep517',
-    dest='use_pep517',
-    action='store_true',
-    default=None,
-    help='Use PEP 517 for building source distributions '
-         '(use --no-use-pep517 to force legacy behaviour).'
-)  # type: Any
-
-no_use_pep517 = partial(
-    Option,
-    '--no-use-pep517',
-    dest='use_pep517',
-    action='callback',
-    callback=_handle_no_use_pep517,
-    default=None,
-    help=SUPPRESS_HELP
-)  # type: Any
-
-install_options = partial(
-    Option,
-    '--install-option',
-    dest='install_options',
-    action='append',
-    metavar='options',
-    help="Extra arguments to be supplied to the setup.py install "
-         "command (use like --install-option=\"--install-scripts=/usr/local/"
-         "bin\"). Use multiple --install-option options to pass multiple "
-         "options to setup.py install. If you are using an option with a "
-         "directory path, be sure to use absolute path.",
-)  # type: Callable[..., Option]
-
-global_options = partial(
-    Option,
-    '--global-option',
-    dest='global_options',
-    action='append',
-    metavar='options',
-    help="Extra global options to be supplied to the setup.py "
-         "call before the install command.",
-)  # type: Callable[..., Option]
-
-no_clean = partial(
-    Option,
-    '--no-clean',
-    action='store_true',
-    default=False,
-    help="Don't clean up build directories."
-)  # type: Callable[..., Option]
-
-pre = partial(
-    Option,
-    '--pre',
-    action='store_true',
-    default=False,
-    help="Include pre-release and development versions. By default, "
-         "pip only finds stable versions.",
-)  # type: Callable[..., Option]
-
-disable_pip_version_check = partial(
-    Option,
-    "--disable-pip-version-check",
-    dest="disable_pip_version_check",
-    action="store_true",
-    default=False,
-    help="Don't periodically check PyPI to determine whether a new version "
-         "of pip is available for download. Implied with --no-index.",
-)  # type: Callable[..., Option]
-
-
-# Deprecated, Remove later
-always_unzip = partial(
-    Option,
-    '-Z', '--always-unzip',
-    dest='always_unzip',
-    action='store_true',
-    help=SUPPRESS_HELP,
-)  # type: Callable[..., Option]
-
-
-def _handle_merge_hash(option, opt_str, value, parser):
-    # type: (Option, str, str, OptionParser) -> None
-    """Given a value spelled "algo:digest", append the digest to a list
-    pointed to in a dict by the algo name."""
-    if not parser.values.hashes:
-        parser.values.hashes = {}
-    try:
-        algo, digest = value.split(':', 1)
-    except ValueError:
-        parser.error('Arguments to %s must be a hash name '
-                     'followed by a value, like --hash=sha256:abcde...' %
-                     opt_str)
-    if algo not in STRONG_HASHES:
-        parser.error('Allowed hash algorithms for %s are %s.' %
-                     (opt_str, ', '.join(STRONG_HASHES)))
-    parser.values.hashes.setdefault(algo, []).append(digest)
-
-
-hash = partial(
-    Option,
-    '--hash',
-    # Hash values eventually end up in InstallRequirement.hashes due to
-    # __dict__ copying in process_line().
-    dest='hashes',
-    action='callback',
-    callback=_handle_merge_hash,
-    type='string',
-    help="Verify that the package's archive matches this "
-         'hash before installing. Example: --hash=sha256:abcdef...',
-)  # type: Callable[..., Option]
-
-
-require_hashes = partial(
-    Option,
-    '--require-hashes',
-    dest='require_hashes',
-    action='store_true',
-    default=False,
-    help='Require a hash to check each requirement against, for '
-         'repeatable installs. This option is implied when any package in a '
-         'requirements file has a --hash option.',
-)  # type: Callable[..., Option]
-
-
-list_path = partial(
-    Option,
-    '--path',
-    dest='path',
-    action='append',
-    help='Restrict to the specified installation path for listing '
-         'packages (can be used multiple times).'
-)  # type: Callable[..., Option]
-
-
-def check_list_path_option(options):
-    # type: (Values) -> None
-    if options.path and (options.user or options.local):
-        raise CommandError(
-            "Cannot combine '--path' with '--user' or '--local'"
-        )
-
-
-##########
-# groups #
-##########
-
-general_group = {
-    'name': 'General Options',
-    'options': [
-        help_,
-        isolated_mode,
-        require_virtualenv,
-        verbose,
-        version,
-        quiet,
-        log,
-        no_input,
-        proxy,
-        retries,
-        timeout,
-        skip_requirements_regex,
-        exists_action,
-        trusted_host,
-        cert,
-        client_cert,
-        cache_dir,
-        no_cache,
-        disable_pip_version_check,
-        no_color,
-    ]
-}  # type: Dict[str, Any]
-
-index_group = {
-    'name': 'Package Index Options',
-    'options': [
-        index_url,
-        extra_index_url,
-        no_index,
-        find_links,
-    ]
-}  # type: Dict[str, Any]
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/command_context.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/command_context.py
deleted file mode 100644
index 3ab255f..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/command_context.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from contextlib import contextmanager
-
-from pip._vendor.contextlib2 import ExitStack
-
-
-class CommandContextMixIn(object):
-    def __init__(self):
-        super(CommandContextMixIn, self).__init__()
-        self._in_main_context = False
-        self._main_context = ExitStack()
-
-    @contextmanager
-    def main_context(self):
-        assert not self._in_main_context
-
-        self._in_main_context = True
-        try:
-            with self._main_context:
-                yield
-        finally:
-            self._in_main_context = False
-
-    def enter_context(self, context_provider):
-        assert self._in_main_context
-
-        return self._main_context.enter_context(context_provider)
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/main_parser.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/main_parser.py
deleted file mode 100644
index a89821d..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/main_parser.py
+++ /dev/null
@@ -1,99 +0,0 @@
-"""A single place for constructing and exposing the main parser
-"""
-
-import os
-import sys
-
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.parser import (
-    ConfigOptionParser,
-    UpdatingDefaultsHelpFormatter,
-)
-from pip._internal.commands import commands_dict, get_similar_commands
-from pip._internal.exceptions import CommandError
-from pip._internal.utils.misc import get_pip_version, get_prog
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import Tuple, List
-
-
-__all__ = ["create_main_parser", "parse_command"]
-
-
-def create_main_parser():
-    # type: () -> ConfigOptionParser
-    """Creates and returns the main parser for pip's CLI
-    """
-
-    parser_kw = {
-        'usage': '\n%prog <command> [options]',
-        'add_help_option': False,
-        'formatter': UpdatingDefaultsHelpFormatter(),
-        'name': 'global',
-        'prog': get_prog(),
-    }
-
-    parser = ConfigOptionParser(**parser_kw)
-    parser.disable_interspersed_args()
-
-    parser.version = get_pip_version()
-
-    # add the general options
-    gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
-    parser.add_option_group(gen_opts)
-
-    # so the help formatter knows
-    parser.main = True  # type: ignore
-
-    # create command listing for description
-    description = [''] + [
-        '%-27s %s' % (name, command_info.summary)
-        for name, command_info in commands_dict.items()
-    ]
-    parser.description = '\n'.join(description)
-
-    return parser
-
-
-def parse_command(args):
-    # type: (List[str]) -> Tuple[str, List[str]]
-    parser = create_main_parser()
-
-    # Note: parser calls disable_interspersed_args(), so the result of this
-    # call is to split the initial args into the general options before the
-    # subcommand and everything else.
-    # For example:
-    #  args: ['--timeout=5', 'install', '--user', 'INITools']
-    #  general_options: ['--timeout=5']
-    #  args_else: ['install', '--user', 'INITools']
-    general_options, args_else = parser.parse_args(args)
-
-    # --version
-    if general_options.version:
-        sys.stdout.write(parser.version)  # type: ignore
-        sys.stdout.write(os.linesep)
-        sys.exit()
-
-    # pip || pip help -> print_help()
-    if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
-        parser.print_help()
-        sys.exit()
-
-    # the subcommand name
-    cmd_name = args_else[0]
-
-    if cmd_name not in commands_dict:
-        guess = get_similar_commands(cmd_name)
-
-        msg = ['unknown command "%s"' % cmd_name]
-        if guess:
-            msg.append('maybe you meant "%s"' % guess)
-
-        raise CommandError(' - '.join(msg))
-
-    # all the args without the subcommand
-    cmd_args = args[:]
-    cmd_args.remove(cmd_name)
-
-    return cmd_name, cmd_args
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/parser.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/parser.py
deleted file mode 100644
index c99456b..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/parser.py
+++ /dev/null
@@ -1,265 +0,0 @@
-"""Base option parser setup"""
-
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import logging
-import optparse
-import sys
-import textwrap
-from distutils.util import strtobool
-
-from pip._vendor.six import string_types
-
-from pip._internal.cli.status_codes import UNKNOWN_ERROR
-from pip._internal.configuration import Configuration, ConfigurationError
-from pip._internal.utils.compat import get_terminal_size
-
-logger = logging.getLogger(__name__)
-
-
-class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
-    """A prettier/less verbose help formatter for optparse."""
-
-    def __init__(self, *args, **kwargs):
-        # help position must be aligned with __init__.parseopts.description
-        kwargs['max_help_position'] = 30
-        kwargs['indent_increment'] = 1
-        kwargs['width'] = get_terminal_size()[0] - 2
-        optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)
-
-    def format_option_strings(self, option):
-        return self._format_option_strings(option, ' <%s>', ', ')
-
-    def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
-        """
-        Return a comma-separated list of option strings and metavars.
-
-        :param option:  tuple of (short opt, long opt), e.g: ('-f', '--format')
-        :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
-        :param optsep:  separator
-        """
-        opts = []
-
-        if option._short_opts:
-            opts.append(option._short_opts[0])
-        if option._long_opts:
-            opts.append(option._long_opts[0])
-        if len(opts) > 1:
-            opts.insert(1, optsep)
-
-        if option.takes_value():
-            metavar = option.metavar or option.dest.lower()
-            opts.append(mvarfmt % metavar.lower())
-
-        return ''.join(opts)
-
-    def format_heading(self, heading):
-        if heading == 'Options':
-            return ''
-        return heading + ':\n'
-
-    def format_usage(self, usage):
-        """
-        Ensure there is only one newline between usage and the first heading
-        if there is no description.
-        """
-        msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), "  ")
-        return msg
-
-    def format_description(self, description):
-        # leave full control over description to us
-        if description:
-            if hasattr(self.parser, 'main'):
-                label = 'Commands'
-            else:
-                label = 'Description'
-            # some doc strings have initial newlines, some don't
-            description = description.lstrip('\n')
-            # some doc strings have final newlines and spaces, some don't
-            description = description.rstrip()
-            # dedent, then reindent
-            description = self.indent_lines(textwrap.dedent(description), "  ")
-            description = '%s:\n%s\n' % (label, description)
-            return description
-        else:
-            return ''
-
-    def format_epilog(self, epilog):
-        # leave full control over epilog to us
-        if epilog:
-            return epilog
-        else:
-            return ''
-
-    def indent_lines(self, text, indent):
-        new_lines = [indent + line for line in text.split('\n')]
-        return "\n".join(new_lines)
-
-
-class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
-    """Custom help formatter for use in ConfigOptionParser.
-
-    This updates the defaults before expanding them, allowing
-    them to show up correctly in the help listing.
-    """
-
-    def expand_default(self, option):
-        if self.parser is not None:
-            self.parser._update_defaults(self.parser.defaults)
-        return optparse.IndentedHelpFormatter.expand_default(self, option)
-
-
-class CustomOptionParser(optparse.OptionParser):
-
-    def insert_option_group(self, idx, *args, **kwargs):
-        """Insert an OptionGroup at a given position."""
-        group = self.add_option_group(*args, **kwargs)
-
-        self.option_groups.pop()
-        self.option_groups.insert(idx, group)
-
-        return group
-
-    @property
-    def option_list_all(self):
-        """Get a list of all options, including those in option groups."""
-        res = self.option_list[:]
-        for i in self.option_groups:
-            res.extend(i.option_list)
-
-        return res
-
-
-class ConfigOptionParser(CustomOptionParser):
-    """Custom option parser which updates its defaults by checking the
-    configuration files and environmental variables"""
-
-    def __init__(self, *args, **kwargs):
-        self.name = kwargs.pop('name')
-
-        isolated = kwargs.pop("isolated", False)
-        self.config = Configuration(isolated)
-
-        assert self.name
-        optparse.OptionParser.__init__(self, *args, **kwargs)
-
-    def check_default(self, option, key, val):
-        try:
-            return option.check_value(key, val)
-        except optparse.OptionValueError as exc:
-            print("An error occurred during configuration: %s" % exc)
-            sys.exit(3)
-
-    def _get_ordered_configuration_items(self):
-        # Configuration gives keys in an unordered manner. Order them.
-        override_order = ["global", self.name, ":env:"]
-
-        # Pool the options into different groups
-        section_items = {name: [] for name in override_order}
-        for section_key, val in self.config.items():
-            # ignore empty values
-            if not val:
-                logger.debug(
-                    "Ignoring configuration key '%s' as it's value is empty.",
-                    section_key
-                )
-                continue
-
-            section, key = section_key.split(".", 1)
-            if section in override_order:
-                section_items[section].append((key, val))
-
-        # Yield each group in their override order
-        for section in override_order:
-            for key, val in section_items[section]:
-                yield key, val
-
-    def _update_defaults(self, defaults):
-        """Updates the given defaults with values from the config files and
-        the environ. Does a little special handling for certain types of
-        options (lists)."""
-
-        # Accumulate complex default state.
-        self.values = optparse.Values(self.defaults)
-        late_eval = set()
-        # Then set the options with those values
-        for key, val in self._get_ordered_configuration_items():
-            # '--' because configuration supports only long names
-            option = self.get_option('--' + key)
-
-            # Ignore options not present in this parser. E.g. non-globals put
-            # in [global] by users that want them to apply to all applicable
-            # commands.
-            if option is None:
-                continue
-
-            if option.action in ('store_true', 'store_false', 'count'):
-                try:
-                    val = strtobool(val)
-                except ValueError:
-                    error_msg = invalid_config_error_message(
-                        option.action, key, val
-                    )
-                    self.error(error_msg)
-
-            elif option.action == 'append':
-                val = val.split()
-                val = [self.check_default(option, key, v) for v in val]
-            elif option.action == 'callback':
-                late_eval.add(option.dest)
-                opt_str = option.get_opt_string()
-                val = option.convert_value(opt_str, val)
-                # From take_action
-                args = option.callback_args or ()
-                kwargs = option.callback_kwargs or {}
-                option.callback(option, opt_str, val, self, *args, **kwargs)
-            else:
-                val = self.check_default(option, key, val)
-
-            defaults[option.dest] = val
-
-        for key in late_eval:
-            defaults[key] = getattr(self.values, key)
-        self.values = None
-        return defaults
-
-    def get_default_values(self):
-        """Overriding to make updating the defaults after instantiation of
-        the option parser possible, _update_defaults() does the dirty work."""
-        if not self.process_default_values:
-            # Old, pre-Optik 1.5 behaviour.
-            return optparse.Values(self.defaults)
-
-        # Load the configuration, or error out in case of an error
-        try:
-            self.config.load()
-        except ConfigurationError as err:
-            self.exit(UNKNOWN_ERROR, str(err))
-
-        defaults = self._update_defaults(self.defaults.copy())  # ours
-        for option in self._get_all_options():
-            default = defaults.get(option.dest)
-            if isinstance(default, string_types):
-                opt_str = option.get_opt_string()
-                defaults[option.dest] = option.check_value(opt_str, default)
-        return optparse.Values(defaults)
-
-    def error(self, msg):
-        self.print_usage(sys.stderr)
-        self.exit(UNKNOWN_ERROR, "%s\n" % msg)
-
-
-def invalid_config_error_message(action, key, val):
-    """Returns a better error message when invalid configuration option
-    is provided."""
-    if action in ('store_true', 'store_false'):
-        return ("{0} is not a valid value for {1} option, "
-                "please specify a boolean value like yes/no, "
-                "true/false or 1/0 instead.").format(val, key)
-
-    return ("{0} is not a valid value for {1} option, "
-            "please specify a numerical value like 1/0 "
-            "instead.").format(val, key)
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/req_command.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/req_command.py
deleted file mode 100644
index 203e86a..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/req_command.py
+++ /dev/null
@@ -1,304 +0,0 @@
-"""Contains the Command base classes that depend on PipSession.
-
-The classes in this module are in a separate module so the commands not
-needing download / PackageFinder capability don't unnecessarily import the
-PackageFinder machinery and all its vendored dependencies, etc.
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-import os
-from functools import partial
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.command_context import CommandContextMixIn
-from pip._internal.exceptions import CommandError
-from pip._internal.index import PackageFinder
-from pip._internal.legacy_resolve import Resolver
-from pip._internal.models.selection_prefs import SelectionPreferences
-from pip._internal.network.session import PipSession
-from pip._internal.operations.prepare import RequirementPreparer
-from pip._internal.req.constructors import (
-    install_req_from_editable,
-    install_req_from_line,
-    install_req_from_req_string,
-)
-from pip._internal.req.req_file import parse_requirements
-from pip._internal.self_outdated_check import (
-    make_link_collector,
-    pip_self_version_check,
-)
-from pip._internal.utils.misc import normalize_path
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from optparse import Values
-    from typing import List, Optional, Tuple
-    from pip._internal.cache import WheelCache
-    from pip._internal.models.target_python import TargetPython
-    from pip._internal.req.req_set import RequirementSet
-    from pip._internal.req.req_tracker import RequirementTracker
-    from pip._internal.utils.temp_dir import TempDirectory
-
-
-class SessionCommandMixin(CommandContextMixIn):
-
-    """
-    A class mixin for command classes needing _build_session().
-    """
-    def __init__(self):
-        super(SessionCommandMixin, self).__init__()
-        self._session = None  # Optional[PipSession]
-
-    @classmethod
-    def _get_index_urls(cls, options):
-        """Return a list of index urls from user-provided options."""
-        index_urls = []
-        if not getattr(options, "no_index", False):
-            url = getattr(options, "index_url", None)
-            if url:
-                index_urls.append(url)
-        urls = getattr(options, "extra_index_urls", None)
-        if urls:
-            index_urls.extend(urls)
-        # Return None rather than an empty list
-        return index_urls or None
-
-    def get_default_session(self, options):
-        # type: (Values) -> PipSession
-        """Get a default-managed session."""
-        if self._session is None:
-            self._session = self.enter_context(self._build_session(options))
-        return self._session
-
-    def _build_session(self, options, retries=None, timeout=None):
-        # type: (Values, Optional[int], Optional[int]) -> PipSession
-        session = PipSession(
-            cache=(
-                normalize_path(os.path.join(options.cache_dir, "http"))
-                if options.cache_dir else None
-            ),
-            retries=retries if retries is not None else options.retries,
-            trusted_hosts=options.trusted_hosts,
-            index_urls=self._get_index_urls(options),
-        )
-
-        # Handle custom ca-bundles from the user
-        if options.cert:
-            session.verify = options.cert
-
-        # Handle SSL client certificate
-        if options.client_cert:
-            session.cert = options.client_cert
-
-        # Handle timeouts
-        if options.timeout or timeout:
-            session.timeout = (
-                timeout if timeout is not None else options.timeout
-            )
-
-        # Handle configured proxies
-        if options.proxy:
-            session.proxies = {
-                "http": options.proxy,
-                "https": options.proxy,
-            }
-
-        # Determine if we can prompt the user for authentication or not
-        session.auth.prompting = not options.no_input
-
-        return session
-
-
-class IndexGroupCommand(Command, SessionCommandMixin):
-
-    """
-    Abstract base class for commands with the index_group options.
-
-    This also corresponds to the commands that permit the pip version check.
-    """
-
-    def handle_pip_version_check(self, options):
-        # type: (Values) -> None
-        """
-        Do the pip version check if not disabled.
-
-        This overrides the default behavior of not doing the check.
-        """
-        # Make sure the index_group options are present.
-        assert hasattr(options, 'no_index')
-
-        if options.disable_pip_version_check or options.no_index:
-            return
-
-        # Otherwise, check if we're using the latest version of pip available.
-        session = self._build_session(
-            options,
-            retries=0,
-            timeout=min(5, options.timeout)
-        )
-        with session:
-            pip_self_version_check(session, options)
-
-
-class RequirementCommand(IndexGroupCommand):
-
-    @staticmethod
-    def make_requirement_preparer(
-        temp_build_dir,           # type: TempDirectory
-        options,                  # type: Values
-        req_tracker,              # type: RequirementTracker
-        download_dir=None,        # type: str
-        wheel_download_dir=None,  # type: str
-    ):
-        # type: (...) -> RequirementPreparer
-        """
-        Create a RequirementPreparer instance for the given parameters.
-        """
-        temp_build_dir_path = temp_build_dir.path
-        assert temp_build_dir_path is not None
-        return RequirementPreparer(
-            build_dir=temp_build_dir_path,
-            src_dir=options.src_dir,
-            download_dir=download_dir,
-            wheel_download_dir=wheel_download_dir,
-            progress_bar=options.progress_bar,
-            build_isolation=options.build_isolation,
-            req_tracker=req_tracker,
-        )
-
-    @staticmethod
-    def make_resolver(
-        preparer,                            # type: RequirementPreparer
-        session,                             # type: PipSession
-        finder,                              # type: PackageFinder
-        options,                             # type: Values
-        wheel_cache=None,                    # type: Optional[WheelCache]
-        use_user_site=False,                 # type: bool
-        ignore_installed=True,               # type: bool
-        ignore_requires_python=False,        # type: bool
-        force_reinstall=False,               # type: bool
-        upgrade_strategy="to-satisfy-only",  # type: str
-        use_pep517=None,                     # type: Optional[bool]
-        py_version_info=None            # type: Optional[Tuple[int, ...]]
-    ):
-        # type: (...) -> Resolver
-        """
-        Create a Resolver instance for the given parameters.
-        """
-        make_install_req = partial(
-            install_req_from_req_string,
-            isolated=options.isolated_mode,
-            wheel_cache=wheel_cache,
-            use_pep517=use_pep517,
-        )
-        return Resolver(
-            preparer=preparer,
-            session=session,
-            finder=finder,
-            make_install_req=make_install_req,
-            use_user_site=use_user_site,
-            ignore_dependencies=options.ignore_dependencies,
-            ignore_installed=ignore_installed,
-            ignore_requires_python=ignore_requires_python,
-            force_reinstall=force_reinstall,
-            upgrade_strategy=upgrade_strategy,
-            py_version_info=py_version_info
-        )
-
-    def populate_requirement_set(
-        self,
-        requirement_set,  # type: RequirementSet
-        args,             # type: List[str]
-        options,          # type: Values
-        finder,           # type: PackageFinder
-        session,          # type: PipSession
-        wheel_cache,      # type: Optional[WheelCache]
-    ):
-        # type: (...) -> None
-        """
-        Marshal cmd line args into a requirement set.
-        """
-        # NOTE: As a side-effect, options.require_hashes and
-        #       requirement_set.require_hashes may be updated
-
-        for filename in options.constraints:
-            for req_to_add in parse_requirements(
-                    filename,
-                    constraint=True, finder=finder, options=options,
-                    session=session, wheel_cache=wheel_cache):
-                req_to_add.is_direct = True
-                requirement_set.add_requirement(req_to_add)
-
-        for req in args:
-            req_to_add = install_req_from_line(
-                req, None, isolated=options.isolated_mode,
-                use_pep517=options.use_pep517,
-                wheel_cache=wheel_cache
-            )
-            req_to_add.is_direct = True
-            requirement_set.add_requirement(req_to_add)
-
-        for req in options.editables:
-            req_to_add = install_req_from_editable(
-                req,
-                isolated=options.isolated_mode,
-                use_pep517=options.use_pep517,
-                wheel_cache=wheel_cache
-            )
-            req_to_add.is_direct = True
-            requirement_set.add_requirement(req_to_add)
-
-        for filename in options.requirements:
-            for req_to_add in parse_requirements(
-                    filename,
-                    finder=finder, options=options, session=session,
-                    wheel_cache=wheel_cache,
-                    use_pep517=options.use_pep517):
-                req_to_add.is_direct = True
-                requirement_set.add_requirement(req_to_add)
-        # If --require-hashes was a line in a requirements file, tell
-        # RequirementSet about it:
-        requirement_set.require_hashes = options.require_hashes
-
-        if not (args or options.editables or options.requirements):
-            opts = {'name': self.name}
-            if options.find_links:
-                raise CommandError(
-                    'You must give at least one requirement to %(name)s '
-                    '(maybe you meant "pip %(name)s %(links)s"?)' %
-                    dict(opts, links=' '.join(options.find_links)))
-            else:
-                raise CommandError(
-                    'You must give at least one requirement to %(name)s '
-                    '(see "pip help %(name)s")' % opts)
-
-    def _build_package_finder(
-        self,
-        options,               # type: Values
-        session,               # type: PipSession
-        target_python=None,    # type: Optional[TargetPython]
-        ignore_requires_python=None,  # type: Optional[bool]
-    ):
-        # type: (...) -> PackageFinder
-        """
-        Create a package finder appropriate to this requirement command.
-
-        :param ignore_requires_python: Whether to ignore incompatible
-            "Requires-Python" values in links. Defaults to False.
-        """
-        link_collector = make_link_collector(session, options=options)
-        selection_prefs = SelectionPreferences(
-            allow_yanked=True,
-            format_control=options.format_control,
-            allow_all_prereleases=options.pre,
-            prefer_binary=options.prefer_binary,
-            ignore_requires_python=ignore_requires_python,
-        )
-
-        return PackageFinder.create(
-            link_collector=link_collector,
-            selection_prefs=selection_prefs,
-            target_python=target_python,
-        )
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/status_codes.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/status_codes.py
deleted file mode 100644
index 275360a..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/cli/status_codes.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from __future__ import absolute_import
-
-SUCCESS = 0
-ERROR = 1
-UNKNOWN_ERROR = 2
-VIRTUALENV_NOT_FOUND = 3
-PREVIOUS_BUILD_DIR_ERROR = 4
-NO_MATCHES_FOUND = 23
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/collector.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/collector.py
deleted file mode 100644
index e6ee598..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/collector.py
+++ /dev/null
@@ -1,548 +0,0 @@
-"""
-The main purpose of this module is to expose LinkCollector.collect_links().
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-import cgi
-import itertools
-import logging
-import mimetypes
-import os
-from collections import OrderedDict
-
-from pip._vendor import html5lib, requests
-from pip._vendor.distlib.compat import unescape
-from pip._vendor.requests.exceptions import HTTPError, RetryError, SSLError
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-from pip._vendor.six.moves.urllib import request as urllib_request
-
-from pip._internal.models.link import Link
-from pip._internal.utils.filetypes import ARCHIVE_EXTENSIONS
-from pip._internal.utils.misc import redact_auth_from_url
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.urls import path_to_url, url_to_path
-from pip._internal.vcs import is_url, vcs
-
-if MYPY_CHECK_RUNNING:
-    from typing import (
-        Callable, Dict, Iterable, List, MutableMapping, Optional, Sequence,
-        Tuple, Union,
-    )
-    import xml.etree.ElementTree
-
-    from pip._vendor.requests import Response
-
-    from pip._internal.models.search_scope import SearchScope
-    from pip._internal.network.session import PipSession
-
-    HTMLElement = xml.etree.ElementTree.Element
-    ResponseHeaders = MutableMapping[str, str]
-
-
-logger = logging.getLogger(__name__)
-
-
-def _match_vcs_scheme(url):
-    # type: (str) -> Optional[str]
-    """Look for VCS schemes in the URL.
-
-    Returns the matched VCS scheme, or None if there's no match.
-    """
-    for scheme in vcs.schemes:
-        if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
-            return scheme
-    return None
-
-
-def _is_url_like_archive(url):
-    # type: (str) -> bool
-    """Return whether the URL looks like an archive.
-    """
-    filename = Link(url).filename
-    for bad_ext in ARCHIVE_EXTENSIONS:
-        if filename.endswith(bad_ext):
-            return True
-    return False
-
-
-class _NotHTML(Exception):
-    def __init__(self, content_type, request_desc):
-        # type: (str, str) -> None
-        super(_NotHTML, self).__init__(content_type, request_desc)
-        self.content_type = content_type
-        self.request_desc = request_desc
-
-
-def _ensure_html_header(response):
-    # type: (Response) -> None
-    """Check the Content-Type header to ensure the response contains HTML.
-
-    Raises `_NotHTML` if the content type is not text/html.
-    """
-    content_type = response.headers.get("Content-Type", "")
-    if not content_type.lower().startswith("text/html"):
-        raise _NotHTML(content_type, response.request.method)
-
-
-class _NotHTTP(Exception):
-    pass
-
-
-def _ensure_html_response(url, session):
-    # type: (str, PipSession) -> None
-    """Send a HEAD request to the URL, and ensure the response contains HTML.
-
-    Raises `_NotHTTP` if the URL is not available for a HEAD request, or
-    `_NotHTML` if the content type is not text/html.
-    """
-    scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
-    if scheme not in {'http', 'https'}:
-        raise _NotHTTP()
-
-    resp = session.head(url, allow_redirects=True)
-    resp.raise_for_status()
-
-    _ensure_html_header(resp)
-
-
-def _get_html_response(url, session):
-    # type: (str, PipSession) -> Response
-    """Access an HTML page with GET, and return the response.
-
-    This consists of three parts:
-
-    1. If the URL looks suspiciously like an archive, send a HEAD first to
-       check the Content-Type is HTML, to avoid downloading a large file.
-       Raise `_NotHTTP` if the content type cannot be determined, or
-       `_NotHTML` if it is not HTML.
-    2. Actually perform the request. Raise HTTP exceptions on network failures.
-    3. Check the Content-Type header to make sure we got HTML, and raise
-       `_NotHTML` otherwise.
-    """
-    if _is_url_like_archive(url):
-        _ensure_html_response(url, session=session)
-
-    logger.debug('Getting page %s', redact_auth_from_url(url))
-
-    resp = session.get(
-        url,
-        headers={
-            "Accept": "text/html",
-            # We don't want to blindly returned cached data for
-            # /simple/, because authors generally expecting that
-            # twine upload && pip install will function, but if
-            # they've done a pip install in the last ~10 minutes
-            # it won't. Thus by setting this to zero we will not
-            # blindly use any cached data, however the benefit of
-            # using max-age=0 instead of no-cache, is that we will
-            # still support conditional requests, so we will still
-            # minimize traffic sent in cases where the page hasn't
-            # changed at all, we will just always incur the round
-            # trip for the conditional GET now instead of only
-            # once per 10 minutes.
-            # For more information, please see pypa/pip#5670.
-            "Cache-Control": "max-age=0",
-        },
-    )
-    resp.raise_for_status()
-
-    # The check for archives above only works if the url ends with
-    # something that looks like an archive. However that is not a
-    # requirement of an url. Unless we issue a HEAD request on every
-    # url we cannot know ahead of time for sure if something is HTML
-    # or not. However we can check after we've downloaded it.
-    _ensure_html_header(resp)
-
-    return resp
-
-
-def _get_encoding_from_headers(headers):
-    # type: (ResponseHeaders) -> Optional[str]
-    """Determine if we have any encoding information in our headers.
-    """
-    if headers and "Content-Type" in headers:
-        content_type, params = cgi.parse_header(headers["Content-Type"])
-        if "charset" in params:
-            return params['charset']
-    return None
-
-
-def _determine_base_url(document, page_url):
-    # type: (HTMLElement, str) -> str
-    """Determine the HTML document's base URL.
-
-    This looks for a ``<base>`` tag in the HTML document. If present, its href
-    attribute denotes the base URL of anchor tags in the document. If there is
-    no such tag (or if it does not have a valid href attribute), the HTML
-    file's URL is used as the base URL.
-
-    :param document: An HTML document representation. The current
-        implementation expects the result of ``html5lib.parse()``.
-    :param page_url: The URL of the HTML document.
-    """
-    for base in document.findall(".//base"):
-        href = base.get("href")
-        if href is not None:
-            return href
-    return page_url
-
-
-def _clean_link(url):
-    # type: (str) -> str
-    """Makes sure a link is fully encoded.  That is, if a ' ' shows up in
-    the link, it will be rewritten to %20 (while not over-quoting
-    % or other characters)."""
-    # Split the URL into parts according to the general structure
-    # `scheme://netloc/path;parameters?query#fragment`. Note that the
-    # `netloc` can be empty and the URI will then refer to a local
-    # filesystem path.
-    result = urllib_parse.urlparse(url)
-    # In both cases below we unquote prior to quoting to make sure
-    # nothing is double quoted.
-    if result.netloc == "":
-        # On Windows the path part might contain a drive letter which
-        # should not be quoted. On Linux where drive letters do not
-        # exist, the colon should be quoted. We rely on urllib.request
-        # to do the right thing here.
-        path = urllib_request.pathname2url(
-            urllib_request.url2pathname(result.path))
-    else:
-        # In addition to the `/` character we protect `@` so that
-        # revision strings in VCS URLs are properly parsed.
-        path = urllib_parse.quote(urllib_parse.unquote(result.path), safe="/@")
-    return urllib_parse.urlunparse(result._replace(path=path))
-
-
-def _create_link_from_element(
-    anchor,    # type: HTMLElement
-    page_url,  # type: str
-    base_url,  # type: str
-):
-    # type: (...) -> Optional[Link]
-    """
-    Convert an anchor element in a simple repository page to a Link.
-    """
-    href = anchor.get("href")
-    if not href:
-        return None
-
-    url = _clean_link(urllib_parse.urljoin(base_url, href))
-    pyrequire = anchor.get('data-requires-python')
-    pyrequire = unescape(pyrequire) if pyrequire else None
-
-    yanked_reason = anchor.get('data-yanked')
-    if yanked_reason:
-        # This is a unicode string in Python 2 (and 3).
-        yanked_reason = unescape(yanked_reason)
-
-    link = Link(
-        url,
-        comes_from=page_url,
-        requires_python=pyrequire,
-        yanked_reason=yanked_reason,
-    )
-
-    return link
-
-
-def parse_links(page):
-    # type: (HTMLPage) -> Iterable[Link]
-    """
-    Parse an HTML document, and yield its anchor elements as Link objects.
-    """
-    document = html5lib.parse(
-        page.content,
-        transport_encoding=page.encoding,
-        namespaceHTMLElements=False,
-    )
-
-    url = page.url
-    base_url = _determine_base_url(document, url)
-    for anchor in document.findall(".//a"):
-        link = _create_link_from_element(
-            anchor,
-            page_url=url,
-            base_url=base_url,
-        )
-        if link is None:
-            continue
-        yield link
-
-
-class HTMLPage(object):
-    """Represents one page, along with its URL"""
-
-    def __init__(
-        self,
-        content,   # type: bytes
-        encoding,  # type: Optional[str]
-        url,       # type: str
-    ):
-        # type: (...) -> None
-        """
-        :param encoding: the encoding to decode the given content.
-        :param url: the URL from which the HTML was downloaded.
-        """
-        self.content = content
-        self.encoding = encoding
-        self.url = url
-
-    def __str__(self):
-        return redact_auth_from_url(self.url)
-
-
-def _handle_get_page_fail(
-    link,  # type: Link
-    reason,  # type: Union[str, Exception]
-    meth=None  # type: Optional[Callable[..., None]]
-):
-    # type: (...) -> None
-    if meth is None:
-        meth = logger.debug
-    meth("Could not fetch URL %s: %s - skipping", link, reason)
-
-
-def _make_html_page(response):
-    # type: (Response) -> HTMLPage
-    encoding = _get_encoding_from_headers(response.headers)
-    return HTMLPage(response.content, encoding=encoding, url=response.url)
-
-
-def _get_html_page(link, session=None):
-    # type: (Link, Optional[PipSession]) -> Optional[HTMLPage]
-    if session is None:
-        raise TypeError(
-            "_get_html_page() missing 1 required keyword argument: 'session'"
-        )
-
-    url = link.url.split('#', 1)[0]
-
-    # Check for VCS schemes that do not support lookup as web pages.
-    vcs_scheme = _match_vcs_scheme(url)
-    if vcs_scheme:
-        logger.debug('Cannot look at %s URL %s', vcs_scheme, link)
-        return None
-
-    # Tack index.html onto file:// URLs that point to directories
-    scheme, _, path, _, _, _ = urllib_parse.urlparse(url)
-    if (scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path))):
-        # add trailing slash if not present so urljoin doesn't trim
-        # final segment
-        if not url.endswith('/'):
-            url += '/'
-        url = urllib_parse.urljoin(url, 'index.html')
-        logger.debug(' file: URL is directory, getting %s', url)
-
-    try:
-        resp = _get_html_response(url, session=session)
-    except _NotHTTP:
-        logger.debug(
-            'Skipping page %s because it looks like an archive, and cannot '
-            'be checked by HEAD.', link,
-        )
-    except _NotHTML as exc:
-        logger.debug(
-            'Skipping page %s because the %s request got Content-Type: %s',
-            link, exc.request_desc, exc.content_type,
-        )
-    except HTTPError as exc:
-        _handle_get_page_fail(link, exc)
-    except RetryError as exc:
-        _handle_get_page_fail(link, exc)
-    except SSLError as exc:
-        reason = "There was a problem confirming the ssl certificate: "
-        reason += str(exc)
-        _handle_get_page_fail(link, reason, meth=logger.info)
-    except requests.ConnectionError as exc:
-        _handle_get_page_fail(link, "connection error: %s" % exc)
-    except requests.Timeout:
-        _handle_get_page_fail(link, "timed out")
-    else:
-        return _make_html_page(resp)
-    return None
-
-
-def _remove_duplicate_links(links):
-    # type: (Iterable[Link]) -> List[Link]
-    """
-    Return a list of links, with duplicates removed and ordering preserved.
-    """
-    # We preserve the ordering when removing duplicates because we can.
-    return list(OrderedDict.fromkeys(links))
-
-
-def group_locations(locations, expand_dir=False):
-    # type: (Sequence[str], bool) -> Tuple[List[str], List[str]]
-    """
-    Divide a list of locations into two groups: "files" (archives) and "urls."
-
-    :return: A pair of lists (files, urls).
-    """
-    files = []
-    urls = []
-
-    # puts the url for the given file path into the appropriate list
-    def sort_path(path):
-        url = path_to_url(path)
-        if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
-            urls.append(url)
-        else:
-            files.append(url)
-
-    for url in locations:
-
-        is_local_path = os.path.exists(url)
-        is_file_url = url.startswith('file:')
-
-        if is_local_path or is_file_url:
-            if is_local_path:
-                path = url
-            else:
-                path = url_to_path(url)
-            if os.path.isdir(path):
-                if expand_dir:
-                    path = os.path.realpath(path)
-                    for item in os.listdir(path):
-                        sort_path(os.path.join(path, item))
-                elif is_file_url:
-                    urls.append(url)
-                else:
-                    logger.warning(
-                        "Path '{0}' is ignored: "
-                        "it is a directory.".format(path),
-                    )
-            elif os.path.isfile(path):
-                sort_path(path)
-            else:
-                logger.warning(
-                    "Url '%s' is ignored: it is neither a file "
-                    "nor a directory.", url,
-                )
-        elif is_url(url):
-            # Only add url with clear scheme
-            urls.append(url)
-        else:
-            logger.warning(
-                "Url '%s' is ignored. It is either a non-existing "
-                "path or lacks a specific scheme.", url,
-            )
-
-    return files, urls
-
-
-class CollectedLinks(object):
-
-    """
-    Encapsulates all the Link objects collected by a call to
-    LinkCollector.collect_links(), stored separately as--
-
-    (1) links from the configured file locations,
-    (2) links from the configured find_links, and
-    (3) a dict mapping HTML page url to links from that page.
-    """
-
-    def __init__(
-        self,
-        files,       # type: List[Link]
-        find_links,  # type: List[Link]
-        pages,       # type: Dict[str, List[Link]]
-    ):
-        # type: (...) -> None
-        """
-        :param files: Links from file locations.
-        :param find_links: Links from find_links.
-        :param pages: A dict mapping HTML page url to links from that page.
-        """
-        self.files = files
-        self.find_links = find_links
-        self.pages = pages
-
-
-class LinkCollector(object):
-
-    """
-    Responsible for collecting Link objects from all configured locations,
-    making network requests as needed.
-
-    The class's main method is its collect_links() method.
-    """
-
-    def __init__(
-        self,
-        session,       # type: PipSession
-        search_scope,  # type: SearchScope
-    ):
-        # type: (...) -> None
-        self.search_scope = search_scope
-        self.session = session
-
-    @property
-    def find_links(self):
-        # type: () -> List[str]
-        return self.search_scope.find_links
-
-    def _get_pages(self, locations):
-        # type: (Iterable[Link]) -> Iterable[HTMLPage]
-        """
-        Yields (page, page_url) from the given locations, skipping
-        locations that have errors.
-        """
-        for location in locations:
-            page = _get_html_page(location, session=self.session)
-            if page is None:
-                continue
-
-            yield page
-
-    def collect_links(self, project_name):
-        # type: (str) -> CollectedLinks
-        """Find all available links for the given project name.
-
-        :return: All the Link objects (unfiltered), as a CollectedLinks object.
-        """
-        search_scope = self.search_scope
-        index_locations = search_scope.get_index_urls_locations(project_name)
-        index_file_loc, index_url_loc = group_locations(index_locations)
-        fl_file_loc, fl_url_loc = group_locations(
-            self.find_links, expand_dir=True,
-        )
-
-        file_links = [
-            Link(url) for url in itertools.chain(index_file_loc, fl_file_loc)
-        ]
-
-        # We trust every directly linked archive in find_links
-        find_link_links = [Link(url, '-f') for url in self.find_links]
-
-        # We trust every url that the user has given us whether it was given
-        # via --index-url or --find-links.
-        # We want to filter out anything that does not have a secure origin.
-        url_locations = [
-            link for link in itertools.chain(
-                (Link(url) for url in index_url_loc),
-                (Link(url) for url in fl_url_loc),
-            )
-            if self.session.is_secure_origin(link)
-        ]
-
-        url_locations = _remove_duplicate_links(url_locations)
-        lines = [
-            '{} location(s) to search for versions of {}:'.format(
-                len(url_locations), project_name,
-            ),
-        ]
-        for link in url_locations:
-            lines.append('* {}'.format(link))
-        logger.debug('\n'.join(lines))
-
-        pages_links = {}
-        for page in self._get_pages(url_locations):
-            pages_links[page.url] = list(parse_links(page))
-
-        return CollectedLinks(
-            files=file_links,
-            find_links=find_link_links,
-            pages=pages_links,
-        )
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/__init__.py
deleted file mode 100644
index 2a311f8..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/__init__.py
+++ /dev/null
@@ -1,114 +0,0 @@
-"""
-Package containing all pip commands
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import importlib
-from collections import OrderedDict, namedtuple
-
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import Any
-    from pip._internal.cli.base_command import Command
-
-
-CommandInfo = namedtuple('CommandInfo', 'module_path, class_name, summary')
-
-# The ordering matters for help display.
-#    Also, even though the module path starts with the same
-# "pip._internal.commands" prefix in each case, we include the full path
-# because it makes testing easier (specifically when modifying commands_dict
-# in test setup / teardown by adding info for a FakeCommand class defined
-# in a test-related module).
-#    Finally, we need to pass an iterable of pairs here rather than a dict
-# so that the ordering won't be lost when using Python 2.7.
-commands_dict = OrderedDict([
-    ('install', CommandInfo(
-        'pip._internal.commands.install', 'InstallCommand',
-        'Install packages.',
-    )),
-    ('download', CommandInfo(
-        'pip._internal.commands.download', 'DownloadCommand',
-        'Download packages.',
-    )),
-    ('uninstall', CommandInfo(
-        'pip._internal.commands.uninstall', 'UninstallCommand',
-        'Uninstall packages.',
-    )),
-    ('freeze', CommandInfo(
-        'pip._internal.commands.freeze', 'FreezeCommand',
-        'Output installed packages in requirements format.',
-    )),
-    ('list', CommandInfo(
-        'pip._internal.commands.list', 'ListCommand',
-        'List installed packages.',
-    )),
-    ('show', CommandInfo(
-        'pip._internal.commands.show', 'ShowCommand',
-        'Show information about installed packages.',
-    )),
-    ('check', CommandInfo(
-        'pip._internal.commands.check', 'CheckCommand',
-        'Verify installed packages have compatible dependencies.',
-    )),
-    ('config', CommandInfo(
-        'pip._internal.commands.configuration', 'ConfigurationCommand',
-        'Manage local and global configuration.',
-    )),
-    ('search', CommandInfo(
-        'pip._internal.commands.search', 'SearchCommand',
-        'Search PyPI for packages.',
-    )),
-    ('wheel', CommandInfo(
-        'pip._internal.commands.wheel', 'WheelCommand',
-        'Build wheels from your requirements.',
-    )),
-    ('hash', CommandInfo(
-        'pip._internal.commands.hash', 'HashCommand',
-        'Compute hashes of package archives.',
-    )),
-    ('completion', CommandInfo(
-        'pip._internal.commands.completion', 'CompletionCommand',
-        'A helper command used for command completion.',
-    )),
-    ('debug', CommandInfo(
-        'pip._internal.commands.debug', 'DebugCommand',
-        'Show information useful for debugging.',
-    )),
-    ('help', CommandInfo(
-        'pip._internal.commands.help', 'HelpCommand',
-        'Show help for commands.',
-    )),
-])  # type: OrderedDict[str, CommandInfo]
-
-
-def create_command(name, **kwargs):
-    # type: (str, **Any) -> Command
-    """
-    Create an instance of the Command class with the given name.
-    """
-    module_path, class_name, summary = commands_dict[name]
-    module = importlib.import_module(module_path)
-    command_class = getattr(module, class_name)
-    command = command_class(name=name, summary=summary, **kwargs)
-
-    return command
-
-
-def get_similar_commands(name):
-    """Command name auto-correct."""
-    from difflib import get_close_matches
-
-    name = name.lower()
-
-    close_commands = get_close_matches(name, commands_dict.keys())
-
-    if close_commands:
-        return close_commands[0]
-    else:
-        return False
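
The deleted `__init__.py` above maps each command name to a module path and class name and only imports the module when `create_command` is called. A minimal standalone sketch of that lazy-registry pattern (the registry contents here are illustrative, pointing at the stdlib `json` module rather than pip's real table):

    import importlib
    from collections import OrderedDict, namedtuple

    CommandInfo = namedtuple('CommandInfo', 'module_path, class_name, summary')

    # Illustrative registry: one entry, pointing at a stdlib class.
    registry = OrderedDict([
        ('encode', CommandInfo('json', 'JSONEncoder', 'Encode objects as JSON.')),
    ])

    def create_command(name):
        # Import lazily, then instantiate the registered class.
        module_path, class_name, summary = registry[name]
        module = importlib.import_module(module_path)
        return getattr(module, class_name)()

    print(create_command('encode').encode({'answer': 42}))  # {"answer": 42}
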
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/check.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/check.py
deleted file mode 100644
index 9689446..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/check.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-import logging
-
-from pip._internal.cli.base_command import Command
-from pip._internal.operations.check import (
-    check_package_set,
-    create_package_set_from_installed,
-)
-from pip._internal.utils.misc import write_output
-
-logger = logging.getLogger(__name__)
-
-
-class CheckCommand(Command):
-    """Verify installed packages have compatible dependencies."""
-
-    usage = """
-      %prog [options]"""
-
-    def run(self, options, args):
-        package_set, parsing_probs = create_package_set_from_installed()
-        missing, conflicting = check_package_set(package_set)
-
-        for project_name in missing:
-            version = package_set[project_name].version
-            for dependency in missing[project_name]:
-                write_output(
-                    "%s %s requires %s, which is not installed.",
-                    project_name, version, dependency[0],
-                )
-
-        for project_name in conflicting:
-            version = package_set[project_name].version
-            for dep_name, dep_version, req in conflicting[project_name]:
-                write_output(
-                    "%s %s has requirement %s, but you have %s %s.",
-                    project_name, version, req, dep_name, dep_version,
-                )
-
-        if missing or conflicting or parsing_probs:
-            return 1
-        else:
-            write_output("No broken requirements found.")
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/completion.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/completion.py
deleted file mode 100644
index c532806..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/completion.py
+++ /dev/null
@@ -1,96 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import sys
-import textwrap
-
-from pip._internal.cli.base_command import Command
-from pip._internal.utils.misc import get_prog
-
-BASE_COMPLETION = """
-# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
-"""
-
-COMPLETION_SCRIPTS = {
-    'bash': """
-        _pip_completion()
-        {
-            COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
-                           COMP_CWORD=$COMP_CWORD \\
-                           PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
-        }
-        complete -o default -F _pip_completion %(prog)s
-    """,
-    'zsh': """
-        function _pip_completion {
-          local words cword
-          read -Ac words
-          read -cn cword
-          reply=( $( COMP_WORDS="$words[*]" \\
-                     COMP_CWORD=$(( cword-1 )) \\
-                     PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ))
-        }
-        compctl -K _pip_completion %(prog)s
-    """,
-    'fish': """
-        function __fish_complete_pip
-            set -lx COMP_WORDS (commandline -o) ""
-            set -lx COMP_CWORD ( \\
-                math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
-            )
-            set -lx PIP_AUTO_COMPLETE 1
-            string split \\  -- (eval $COMP_WORDS[1])
-        end
-        complete -fa "(__fish_complete_pip)" -c %(prog)s
-    """,
-}
-
-
-class CompletionCommand(Command):
-    """A helper command to be used for command completion."""
-
-    ignore_require_venv = True
-
-    def __init__(self, *args, **kw):
-        super(CompletionCommand, self).__init__(*args, **kw)
-
-        cmd_opts = self.cmd_opts
-
-        cmd_opts.add_option(
-            '--bash', '-b',
-            action='store_const',
-            const='bash',
-            dest='shell',
-            help='Emit completion code for bash')
-        cmd_opts.add_option(
-            '--zsh', '-z',
-            action='store_const',
-            const='zsh',
-            dest='shell',
-            help='Emit completion code for zsh')
-        cmd_opts.add_option(
-            '--fish', '-f',
-            action='store_const',
-            const='fish',
-            dest='shell',
-            help='Emit completion code for fish')
-
-        self.parser.insert_option_group(0, cmd_opts)
-
-    def run(self, options, args):
-        """Prints the completion code of the given shell"""
-        shells = COMPLETION_SCRIPTS.keys()
-        shell_options = ['--' + shell for shell in sorted(shells)]
-        if options.shell in shells:
-            script = textwrap.dedent(
-                COMPLETION_SCRIPTS.get(options.shell, '') % {
-                    'prog': get_prog(),
-                }
-            )
-            print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
-        else:
-            sys.stderr.write(
-                'ERROR: You must pass %s\n' % ' or '.join(shell_options)
-            )
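
`CompletionCommand.run` just templates a shell-specific script with the program name and prints it. One way to capture that output, assuming pip is installed for the current interpreter, is to call the public CLI:

    import subprocess
    import sys

    # Emit the bash completion block ('# pip bash completion start ... end').
    result = subprocess.run(
        [sys.executable, '-m', 'pip', 'completion', '--bash'],
        capture_output=True, text=True, check=True,
    )
    print(result.stdout)
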
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/configuration.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/configuration.py
deleted file mode 100644
index efcf5bb..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/configuration.py
+++ /dev/null
@@ -1,233 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-import logging
-import os
-import subprocess
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.status_codes import ERROR, SUCCESS
-from pip._internal.configuration import (
-    Configuration,
-    get_configuration_files,
-    kinds,
-)
-from pip._internal.exceptions import PipError
-from pip._internal.utils.misc import get_prog, write_output
-
-logger = logging.getLogger(__name__)
-
-
-class ConfigurationCommand(Command):
-    """Manage local and global configuration.
-
-        Subcommands:
-
-        list: List the active configuration (or from the file specified)
-        edit: Edit the configuration file in an editor
-        get: Get the value associated with name
-        set: Set the name=value
-        unset: Unset the value associated with name
-
-        If none of --user, --global and --site are passed, a virtual
-        environment configuration file is used if one is active and the file
-        exists. Otherwise, all modifications happen to the user file by
-        default.
-    """
-
-    ignore_require_venv = True
-    usage = """
-        %prog [<file-option>] list
-        %prog [<file-option>] [--editor <editor-path>] edit
-
-        %prog [<file-option>] get name
-        %prog [<file-option>] set name value
-        %prog [<file-option>] unset name
-    """
-
-    def __init__(self, *args, **kwargs):
-        super(ConfigurationCommand, self).__init__(*args, **kwargs)
-
-        self.configuration = None
-
-        self.cmd_opts.add_option(
-            '--editor',
-            dest='editor',
-            action='store',
-            default=None,
-            help=(
-                'Editor to use to edit the file. Uses VISUAL or EDITOR '
-                'environment variables if not provided.'
-            )
-        )
-
-        self.cmd_opts.add_option(
-            '--global',
-            dest='global_file',
-            action='store_true',
-            default=False,
-            help='Use the system-wide configuration file only'
-        )
-
-        self.cmd_opts.add_option(
-            '--user',
-            dest='user_file',
-            action='store_true',
-            default=False,
-            help='Use the user configuration file only'
-        )
-
-        self.cmd_opts.add_option(
-            '--site',
-            dest='site_file',
-            action='store_true',
-            default=False,
-            help='Use the current environment configuration file only'
-        )
-
-        self.parser.insert_option_group(0, self.cmd_opts)
-
-    def run(self, options, args):
-        handlers = {
-            "list": self.list_values,
-            "edit": self.open_in_editor,
-            "get": self.get_name,
-            "set": self.set_name_value,
-            "unset": self.unset_name
-        }
-
-        # Determine action
-        if not args or args[0] not in handlers:
-            logger.error("Need an action ({}) to perform.".format(
-                ", ".join(sorted(handlers)))
-            )
-            return ERROR
-
-        action = args[0]
-
-        # Determine which configuration files are to be loaded
-        #    Depends on whether the command is modifying.
-        try:
-            load_only = self._determine_file(
-                options, need_value=(action in ["get", "set", "unset", "edit"])
-            )
-        except PipError as e:
-            logger.error(e.args[0])
-            return ERROR
-
-        # Load a new configuration
-        self.configuration = Configuration(
-            isolated=options.isolated_mode, load_only=load_only
-        )
-        self.configuration.load()
-
-        # Error handling happens here, not in the action-handlers.
-        try:
-            handlers[action](options, args[1:])
-        except PipError as e:
-            logger.error(e.args[0])
-            return ERROR
-
-        return SUCCESS
-
-    def _determine_file(self, options, need_value):
-        file_options = [key for key, value in (
-            (kinds.USER, options.user_file),
-            (kinds.GLOBAL, options.global_file),
-            (kinds.SITE, options.site_file),
-        ) if value]
-
-        if not file_options:
-            if not need_value:
-                return None
-            # Default to user, unless there's a site file.
-            elif any(
-                os.path.exists(site_config_file)
-                for site_config_file in get_configuration_files()[kinds.SITE]
-            ):
-                return kinds.SITE
-            else:
-                return kinds.USER
-        elif len(file_options) == 1:
-            return file_options[0]
-
-        raise PipError(
-            "Need exactly one file to operate upon "
-            "(--user, --site, --global) to perform."
-        )
-
-    def list_values(self, options, args):
-        self._get_n_args(args, "list", n=0)
-
-        for key, value in sorted(self.configuration.items()):
-            write_output("%s=%r", key, value)
-
-    def get_name(self, options, args):
-        key = self._get_n_args(args, "get [name]", n=1)
-        value = self.configuration.get_value(key)
-
-        write_output("%s", value)
-
-    def set_name_value(self, options, args):
-        key, value = self._get_n_args(args, "set [name] [value]", n=2)
-        self.configuration.set_value(key, value)
-
-        self._save_configuration()
-
-    def unset_name(self, options, args):
-        key = self._get_n_args(args, "unset [name]", n=1)
-        self.configuration.unset_value(key)
-
-        self._save_configuration()
-
-    def open_in_editor(self, options, args):
-        editor = self._determine_editor(options)
-
-        fname = self.configuration.get_file_to_edit()
-        if fname is None:
-            raise PipError("Could not determine appropriate file.")
-
-        try:
-            subprocess.check_call([editor, fname])
-        except subprocess.CalledProcessError as e:
-            raise PipError(
-                "Editor Subprocess exited with exit code {}"
-                .format(e.returncode)
-            )
-
-    def _get_n_args(self, args, example, n):
-        """Helper to make sure the command got the right number of arguments
-        """
-        if len(args) != n:
-            msg = (
-                'Got unexpected number of arguments, expected {}. '
-                '(example: "{} config {}")'
-            ).format(n, get_prog(), example)
-            raise PipError(msg)
-
-        if n == 1:
-            return args[0]
-        else:
-            return args
-
-    def _save_configuration(self):
-        # We successfully ran a modifying command. Need to save the
-        # configuration.
-        try:
-            self.configuration.save()
-        except Exception:
-            logger.error(
-                "Unable to save configuration. Please report this as a bug.",
-                exc_info=1
-            )
-            raise PipError("Internal Error.")
-
-    def _determine_editor(self, options):
-        if options.editor is not None:
-            return options.editor
-        elif "VISUAL" in os.environ:
-            return os.environ["VISUAL"]
-        elif "EDITOR" in os.environ:
-            return os.environ["EDITOR"]
-        else:
-            raise PipError("Could not determine editor to use.")
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/debug.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/debug.py
deleted file mode 100644
index 5322c82..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/debug.py
+++ /dev/null
@@ -1,115 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import locale
-import logging
-import sys
-
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.cmdoptions import make_target_python
-from pip._internal.cli.status_codes import SUCCESS
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import get_pip_version
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.wheel import format_tag
-
-if MYPY_CHECK_RUNNING:
-    from typing import Any, List
-    from optparse import Values
-
-logger = logging.getLogger(__name__)
-
-
-def show_value(name, value):
-    # type: (str, str) -> None
-    logger.info('{}: {}'.format(name, value))
-
-
-def show_sys_implementation():
-    # type: () -> None
-    logger.info('sys.implementation:')
-    if hasattr(sys, 'implementation'):
-        implementation = sys.implementation  # type: ignore
-        implementation_name = implementation.name
-    else:
-        implementation_name = ''
-
-    with indent_log():
-        show_value('name', implementation_name)
-
-
-def show_tags(options):
-    # type: (Values) -> None
-    tag_limit = 10
-
-    target_python = make_target_python(options)
-    tags = target_python.get_tags()
-
-    # Display the target options that were explicitly provided.
-    formatted_target = target_python.format_given()
-    suffix = ''
-    if formatted_target:
-        suffix = ' (target: {})'.format(formatted_target)
-
-    msg = 'Compatible tags: {}{}'.format(len(tags), suffix)
-    logger.info(msg)
-
-    if options.verbose < 1 and len(tags) > tag_limit:
-        tags_limited = True
-        tags = tags[:tag_limit]
-    else:
-        tags_limited = False
-
-    with indent_log():
-        for tag in tags:
-            logger.info(format_tag(tag))
-
-        if tags_limited:
-            msg = (
-                '...\n'
-                '[First {tag_limit} tags shown. Pass --verbose to show all.]'
-            ).format(tag_limit=tag_limit)
-            logger.info(msg)
-
-
-class DebugCommand(Command):
-    """
-    Display debug information.
-    """
-
-    usage = """
-      %prog """
-    ignore_require_venv = True
-
-    def __init__(self, *args, **kw):
-        super(DebugCommand, self).__init__(*args, **kw)
-
-        cmd_opts = self.cmd_opts
-        cmdoptions.add_target_python_options(cmd_opts)
-        self.parser.insert_option_group(0, cmd_opts)
-
-    def run(self, options, args):
-        # type: (Values, List[Any]) -> int
-        logger.warning(
-            "This command is only meant for debugging. "
-            "Do not use this with automation for parsing and getting these "
-            "details, since the output and options of this command may "
-            "change without notice."
-        )
-        show_value('pip version', get_pip_version())
-        show_value('sys.version', sys.version)
-        show_value('sys.executable', sys.executable)
-        show_value('sys.getdefaultencoding', sys.getdefaultencoding())
-        show_value('sys.getfilesystemencoding', sys.getfilesystemencoding())
-        show_value(
-            'locale.getpreferredencoding', locale.getpreferredencoding(),
-        )
-        show_value('sys.platform', sys.platform)
-        show_sys_implementation()
-
-        show_tags(options)
-
-        return SUCCESS
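
Most of what `DebugCommand.run` prints is plain interpreter introspection; the same values can be gathered directly (the compatible-tag listing is the part that genuinely needs pip):

    import locale
    import sys

    print('sys.version:', sys.version)
    print('sys.executable:', sys.executable)
    print('sys.getdefaultencoding:', sys.getdefaultencoding())
    print('sys.getfilesystemencoding:', sys.getfilesystemencoding())
    print('locale.getpreferredencoding:', locale.getpreferredencoding())
    print('sys.platform:', sys.platform)
    if hasattr(sys, 'implementation'):
        print('sys.implementation.name:', sys.implementation.name)
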
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/download.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/download.py
deleted file mode 100644
index a63019f..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/download.py
+++ /dev/null
@@ -1,156 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import logging
-import os
-
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.cmdoptions import make_target_python
-from pip._internal.cli.req_command import RequirementCommand
-from pip._internal.req import RequirementSet
-from pip._internal.req.req_tracker import RequirementTracker
-from pip._internal.utils.filesystem import check_path_owner
-from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
-from pip._internal.utils.temp_dir import TempDirectory
-
-logger = logging.getLogger(__name__)
-
-
-class DownloadCommand(RequirementCommand):
-    """
-    Download packages from:
-
-    - PyPI (and other indexes) using requirement specifiers.
-    - VCS project urls.
-    - Local project directories.
-    - Local or remote source archives.
-
-    pip also supports downloading from "requirements files", which provide
-    an easy way to specify a whole environment to be downloaded.
-    """
-
-    usage = """
-      %prog [options] <requirement specifier> [package-index-options] ...
-      %prog [options] -r <requirements file> [package-index-options] ...
-      %prog [options] <vcs project url> ...
-      %prog [options] <local project path> ...
-      %prog [options] <archive url/path> ..."""
-
-    def __init__(self, *args, **kw):
-        super(DownloadCommand, self).__init__(*args, **kw)
-
-        cmd_opts = self.cmd_opts
-
-        cmd_opts.add_option(cmdoptions.constraints())
-        cmd_opts.add_option(cmdoptions.requirements())
-        cmd_opts.add_option(cmdoptions.build_dir())
-        cmd_opts.add_option(cmdoptions.no_deps())
-        cmd_opts.add_option(cmdoptions.global_options())
-        cmd_opts.add_option(cmdoptions.no_binary())
-        cmd_opts.add_option(cmdoptions.only_binary())
-        cmd_opts.add_option(cmdoptions.prefer_binary())
-        cmd_opts.add_option(cmdoptions.src())
-        cmd_opts.add_option(cmdoptions.pre())
-        cmd_opts.add_option(cmdoptions.no_clean())
-        cmd_opts.add_option(cmdoptions.require_hashes())
-        cmd_opts.add_option(cmdoptions.progress_bar())
-        cmd_opts.add_option(cmdoptions.no_build_isolation())
-        cmd_opts.add_option(cmdoptions.use_pep517())
-        cmd_opts.add_option(cmdoptions.no_use_pep517())
-
-        cmd_opts.add_option(
-            '-d', '--dest', '--destination-dir', '--destination-directory',
-            dest='download_dir',
-            metavar='dir',
-            default=os.curdir,
-            help=("Download packages into ."),
-        )
-
-        cmdoptions.add_target_python_options(cmd_opts)
-
-        index_opts = cmdoptions.make_option_group(
-            cmdoptions.index_group,
-            self.parser,
-        )
-
-        self.parser.insert_option_group(0, index_opts)
-        self.parser.insert_option_group(0, cmd_opts)
-
-    def run(self, options, args):
-        options.ignore_installed = True
-        # editable doesn't really make sense for `pip download`, but the bowels
-        # of the RequirementSet code require that property.
-        options.editables = []
-
-        cmdoptions.check_dist_restriction(options)
-
-        options.src_dir = os.path.abspath(options.src_dir)
-        options.download_dir = normalize_path(options.download_dir)
-
-        ensure_dir(options.download_dir)
-
-        session = self.get_default_session(options)
-
-        target_python = make_target_python(options)
-        finder = self._build_package_finder(
-            options=options,
-            session=session,
-            target_python=target_python,
-        )
-        build_delete = (not (options.no_clean or options.build_dir))
-        if options.cache_dir and not check_path_owner(options.cache_dir):
-            logger.warning(
-                "The directory '%s' or its parent directory is not owned "
-                "by the current user and caching wheels has been "
-                "disabled. check the permissions and owner of that "
-                "directory. If executing pip with sudo, you may want "
-                "sudo's -H flag.",
-                options.cache_dir,
-            )
-            options.cache_dir = None
-
-        with RequirementTracker() as req_tracker, TempDirectory(
-            options.build_dir, delete=build_delete, kind="download"
-        ) as directory:
-
-            requirement_set = RequirementSet(
-                require_hashes=options.require_hashes,
-            )
-            self.populate_requirement_set(
-                requirement_set,
-                args,
-                options,
-                finder,
-                session,
-                None
-            )
-
-            preparer = self.make_requirement_preparer(
-                temp_build_dir=directory,
-                options=options,
-                req_tracker=req_tracker,
-                download_dir=options.download_dir,
-            )
-
-            resolver = self.make_resolver(
-                preparer=preparer,
-                finder=finder,
-                session=session,
-                options=options,
-                py_version_info=options.python_version,
-            )
-            resolver.resolve(requirement_set)
-
-            downloaded = ' '.join([
-                req.name for req in requirement_set.successfully_downloaded
-            ])
-            if downloaded:
-                write_output('Successfully downloaded %s', downloaded)
-
-            # Clean up
-            if not options.no_clean:
-                requirement_set.cleanup_files()
-
-        return requirement_set
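
The options wired up in `DownloadCommand.__init__` back the public `pip download` CLI. A hedged usage sketch, invoking it as a subprocess ('requests' and the `downloads` directory are just example values):

    import subprocess
    import sys

    # Download a package and its dependencies into ./downloads.
    subprocess.run(
        [sys.executable, '-m', 'pip', 'download', 'requests', '--dest', 'downloads'],
        check=True,
    )
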
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/freeze.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/freeze.py
deleted file mode 100644
index c59eb39..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/freeze.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import sys
-
-from pip._internal.cache import WheelCache
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.base_command import Command
-from pip._internal.models.format_control import FormatControl
-from pip._internal.operations.freeze import freeze
-from pip._internal.utils.compat import stdlib_pkgs
-
-DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'}
-
-
-class FreezeCommand(Command):
-    """
-    Output installed packages in requirements format.
-
-    Packages are listed in a case-insensitive sorted order.
-    """
-
-    usage = """
-      %prog [options]"""
-    log_streams = ("ext://sys.stderr", "ext://sys.stderr")
-
-    def __init__(self, *args, **kw):
-        super(FreezeCommand, self).__init__(*args, **kw)
-
-        self.cmd_opts.add_option(
-            '-r', '--requirement',
-            dest='requirements',
-            action='append',
-            default=[],
-            metavar='file',
-            help="Use the order in the given requirements file and its "
-                 "comments when generating output. This option can be "
-                 "used multiple times.")
-        self.cmd_opts.add_option(
-            '-f', '--find-links',
-            dest='find_links',
-            action='append',
-            default=[],
-            metavar='URL',
-            help='URL for finding packages, which will be added to the '
-                 'output.')
-        self.cmd_opts.add_option(
-            '-l', '--local',
-            dest='local',
-            action='store_true',
-            default=False,
-            help='If in a virtualenv that has global access, do not output '
-                 'globally-installed packages.')
-        self.cmd_opts.add_option(
-            '--user',
-            dest='user',
-            action='store_true',
-            default=False,
-            help='Only output packages installed in user-site.')
-        self.cmd_opts.add_option(cmdoptions.list_path())
-        self.cmd_opts.add_option(
-            '--all',
-            dest='freeze_all',
-            action='store_true',
-            help='Do not skip these packages in the output:'
-                 ' %s' % ', '.join(DEV_PKGS))
-        self.cmd_opts.add_option(
-            '--exclude-editable',
-            dest='exclude_editable',
-            action='store_true',
-            help='Exclude editable package from output.')
-
-        self.parser.insert_option_group(0, self.cmd_opts)
-
-    def run(self, options, args):
-        format_control = FormatControl(set(), set())
-        wheel_cache = WheelCache(options.cache_dir, format_control)
-        skip = set(stdlib_pkgs)
-        if not options.freeze_all:
-            skip.update(DEV_PKGS)
-
-        cmdoptions.check_list_path_option(options)
-
-        freeze_kwargs = dict(
-            requirement=options.requirements,
-            find_links=options.find_links,
-            local_only=options.local,
-            user_only=options.user,
-            paths=options.path,
-            skip_regex=options.skip_requirements_regex,
-            isolated=options.isolated_mode,
-            wheel_cache=wheel_cache,
-            skip=skip,
-            exclude_editable=options.exclude_editable,
-        )
-
-        try:
-            for line in freeze(**freeze_kwargs):
-                sys.stdout.write(line + '\n')
-        finally:
-            wheel_cache.cleanup()
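
The default freeze output is one `name==version` line per distribution, skipping the `DEV_PKGS` set unless `--all` is given. A rough standalone approximation with `pkg_resources` (real `pip freeze` also handles editable/VCS installs, `--path`, requirement-file ordering, and so on):

    import pkg_resources

    DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'}

    lines = sorted(
        ('%s==%s' % (dist.project_name, dist.version)
         for dist in pkg_resources.working_set
         if dist.project_name.lower() not in DEV_PKGS),
        key=str.lower,
    )
    print('\n'.join(lines))
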
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/hash.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/hash.py
deleted file mode 100644
index 1dc7fb0..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/hash.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import hashlib
-import logging
-import sys
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.status_codes import ERROR
-from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
-from pip._internal.utils.misc import read_chunks, write_output
-
-logger = logging.getLogger(__name__)
-
-
-class HashCommand(Command):
-    """
-    Compute a hash of a local package archive.
-
-    These can be used with --hash in a requirements file to do repeatable
-    installs.
-    """
-
-    usage = '%prog [options] <file> ...'
-    ignore_require_venv = True
-
-    def __init__(self, *args, **kw):
-        super(HashCommand, self).__init__(*args, **kw)
-        self.cmd_opts.add_option(
-            '-a', '--algorithm',
-            dest='algorithm',
-            choices=STRONG_HASHES,
-            action='store',
-            default=FAVORITE_HASH,
-            help='The hash algorithm to use: one of %s' %
-                 ', '.join(STRONG_HASHES))
-        self.parser.insert_option_group(0, self.cmd_opts)
-
-    def run(self, options, args):
-        if not args:
-            self.parser.print_usage(sys.stderr)
-            return ERROR
-
-        algorithm = options.algorithm
-        for path in args:
-            write_output('%s:\n--hash=%s:%s',
-                         path, algorithm, _hash_of_file(path, algorithm))
-
-
-def _hash_of_file(path, algorithm):
-    """Return the hash digest of a file."""
-    with open(path, 'rb') as archive:
-        hash = hashlib.new(algorithm)
-        for chunk in read_chunks(archive):
-            hash.update(chunk)
-    return hash.hexdigest()
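
`_hash_of_file` above leans on pip's `read_chunks` helper; a self-contained equivalent streams the file in fixed-size chunks so large archives never sit in memory whole (the 8 KiB chunk size is an arbitrary choice, not taken from pip):

    import hashlib

    def hash_of_file(path, algorithm='sha256', chunk_size=8192):
        digest = hashlib.new(algorithm)
        with open(path, 'rb') as archive:
            for chunk in iter(lambda: archive.read(chunk_size), b''):
                digest.update(chunk)
        return digest.hexdigest()

    # Matches the requirements-file format the command prints:
    # print('--hash=sha256:%s' % hash_of_file('some-archive.zip'))  # example path
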
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/help.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/help.py
deleted file mode 100644
index 75af999..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/help.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.status_codes import SUCCESS
-from pip._internal.exceptions import CommandError
-
-
-class HelpCommand(Command):
-    """Show help for commands"""
-
-    usage = """
-      %prog """
-    ignore_require_venv = True
-
-    def run(self, options, args):
-        from pip._internal.commands import (
-            commands_dict, create_command, get_similar_commands,
-        )
-
-        try:
-            # 'pip help' with no args is handled by pip.__init__.parseopt()
-            cmd_name = args[0]  # the command we need help for
-        except IndexError:
-            return SUCCESS
-
-        if cmd_name not in commands_dict:
-            guess = get_similar_commands(cmd_name)
-
-            msg = ['unknown command "%s"' % cmd_name]
-            if guess:
-                msg.append('maybe you meant "%s"' % guess)
-
-            raise CommandError(' - '.join(msg))
-
-        command = create_command(cmd_name)
-        command.parser.print_help()
-
-        return SUCCESS
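
The "maybe you meant" hint comes from `get_similar_commands`, a thin wrapper over `difflib.get_close_matches`. Shown standalone (command names copied from `commands_dict` earlier in this diff):

    from difflib import get_close_matches

    KNOWN_COMMANDS = ['install', 'download', 'uninstall', 'freeze', 'list',
                      'show', 'check', 'config', 'search', 'wheel', 'hash',
                      'completion', 'debug', 'help']

    def suggest(name):
        matches = get_close_matches(name.lower(), KNOWN_COMMANDS)
        return matches[0] if matches else None

    print(suggest('instal'))  # -> install
    print(suggest('freez'))   # -> freeze
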
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/install.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/install.py
deleted file mode 100644
index 66071f6..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/install.py
+++ /dev/null
@@ -1,630 +0,0 @@
-
-# The following comment should be removed at some point in the future.
-# It's included for now because without it InstallCommand.run() has a
-# couple errors where we have to know req.name is str rather than
-# Optional[str] for the InstallRequirement req.
-# mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import errno
-import logging
-import operator
-import os
-import shutil
-from optparse import SUPPRESS_HELP
-
-from pip._vendor import pkg_resources
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.cache import WheelCache
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.cmdoptions import make_target_python
-from pip._internal.cli.req_command import RequirementCommand
-from pip._internal.cli.status_codes import ERROR, SUCCESS
-from pip._internal.exceptions import (
-    CommandError,
-    InstallationError,
-    PreviousBuildDirError,
-)
-from pip._internal.locations import distutils_scheme
-from pip._internal.operations.check import check_install_conflicts
-from pip._internal.req import RequirementSet, install_given_reqs
-from pip._internal.req.req_tracker import RequirementTracker
-from pip._internal.utils.filesystem import check_path_owner
-from pip._internal.utils.misc import (
-    ensure_dir,
-    get_installed_version,
-    protect_pip_from_modification_on_windows,
-    write_output,
-)
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.virtualenv import virtualenv_no_global
-from pip._internal.wheel import WheelBuilder
-
-if MYPY_CHECK_RUNNING:
-    from optparse import Values
-    from typing import Any, List, Optional
-
-    from pip._internal.models.format_control import FormatControl
-    from pip._internal.req.req_install import InstallRequirement
-    from pip._internal.wheel import BinaryAllowedPredicate
-
-
-logger = logging.getLogger(__name__)
-
-
-def is_wheel_installed():
-    """
-    Return whether the wheel package is installed.
-    """
-    try:
-        import wheel  # noqa: F401
-    except ImportError:
-        return False
-
-    return True
-
-
-def build_wheels(
-    builder,              # type: WheelBuilder
-    pep517_requirements,  # type: List[InstallRequirement]
-    legacy_requirements,  # type: List[InstallRequirement]
-):
-    # type: (...) -> List[InstallRequirement]
-    """
-    Build wheels for requirements, depending on whether wheel is installed.
-    """
-    # We don't build wheels for legacy requirements if wheel is not installed.
-    should_build_legacy = is_wheel_installed()
-
-    # Always build PEP 517 requirements
-    build_failures = builder.build(
-        pep517_requirements,
-        should_unpack=True,
-    )
-
-    if should_build_legacy:
-        # We don't care about failures building legacy
-        # requirements, as we'll fall through to a direct
-        # install for those.
-        builder.build(
-            legacy_requirements,
-            should_unpack=True,
-        )
-
-    return build_failures
-
-
-def get_check_binary_allowed(format_control):
-    # type: (FormatControl) -> BinaryAllowedPredicate
-    def check_binary_allowed(req):
-        # type: (InstallRequirement) -> bool
-        canonical_name = canonicalize_name(req.name)
-        allowed_formats = format_control.get_allowed_formats(canonical_name)
-        return "binary" in allowed_formats
-
-    return check_binary_allowed
-
-
-class InstallCommand(RequirementCommand):
-    """
-    Install packages from:
-
-    - PyPI (and other indexes) using requirement specifiers.
-    - VCS project urls.
-    - Local project directories.
-    - Local or remote source archives.
-
-    pip also supports installing from "requirements files", which provide
-    an easy way to specify a whole environment to be installed.
-    """
-
-    usage = """
-      %prog [options] <requirement specifier> [package-index-options] ...
-      %prog [options] -r <requirements file> [package-index-options] ...
-      %prog [options] [-e] <vcs project url> ...
-      %prog [options] [-e] <local project path> ...
-      %prog [options] <archive url/path> ..."""
-
-    def __init__(self, *args, **kw):
-        super(InstallCommand, self).__init__(*args, **kw)
-
-        cmd_opts = self.cmd_opts
-
-        cmd_opts.add_option(cmdoptions.requirements())
-        cmd_opts.add_option(cmdoptions.constraints())
-        cmd_opts.add_option(cmdoptions.no_deps())
-        cmd_opts.add_option(cmdoptions.pre())
-
-        cmd_opts.add_option(cmdoptions.editable())
-        cmd_opts.add_option(
-            '-t', '--target',
-            dest='target_dir',
-            metavar='dir',
-            default=None,
-            help='Install packages into <dir>. '
-                 'By default this will not replace existing files/folders in '
-                 '<dir>. Use --upgrade to replace existing packages in <dir> '
-                 'with new versions.'
-        )
-        cmdoptions.add_target_python_options(cmd_opts)
-
-        cmd_opts.add_option(
-            '--user',
-            dest='use_user_site',
-            action='store_true',
-            help="Install to the Python user install directory for your "
-                 "platform. Typically ~/.local/, or %APPDATA%\\Python on "
-                 "Windows. (See the Python documentation for site.USER_BASE "
-                 "for full details.)")
-        cmd_opts.add_option(
-            '--no-user',
-            dest='use_user_site',
-            action='store_false',
-            help=SUPPRESS_HELP)
-        cmd_opts.add_option(
-            '--root',
-            dest='root_path',
-            metavar='dir',
-            default=None,
-            help="Install everything relative to this alternate root "
-                 "directory.")
-        cmd_opts.add_option(
-            '--prefix',
-            dest='prefix_path',
-            metavar='dir',
-            default=None,
-            help="Installation prefix where lib, bin and other top-level "
-                 "folders are placed")
-
-        cmd_opts.add_option(cmdoptions.build_dir())
-
-        cmd_opts.add_option(cmdoptions.src())
-
-        cmd_opts.add_option(
-            '-U', '--upgrade',
-            dest='upgrade',
-            action='store_true',
-            help='Upgrade all specified packages to the newest available '
-                 'version. The handling of dependencies depends on the '
-                 'upgrade-strategy used.'
-        )
-
-        cmd_opts.add_option(
-            '--upgrade-strategy',
-            dest='upgrade_strategy',
-            default='only-if-needed',
-            choices=['only-if-needed', 'eager'],
-            help='Determines how dependency upgrading should be handled '
-                 '[default: %default]. '
-                 '"eager" - dependencies are upgraded regardless of '
-                 'whether the currently installed version satisfies the '
-                 'requirements of the upgraded package(s). '
-                 '"only-if-needed" -  are upgraded only when they do not '
-                 'satisfy the requirements of the upgraded package(s).'
-        )
-
-        cmd_opts.add_option(
-            '--force-reinstall',
-            dest='force_reinstall',
-            action='store_true',
-            help='Reinstall all packages even if they are already '
-                 'up-to-date.')
-
-        cmd_opts.add_option(
-            '-I', '--ignore-installed',
-            dest='ignore_installed',
-            action='store_true',
-            help='Ignore the installed packages, overwriting them. '
-                 'This can break your system if the existing package '
-                 'is of a different version or was installed '
-                 'with a different package manager!'
-        )
-
-        cmd_opts.add_option(cmdoptions.ignore_requires_python())
-        cmd_opts.add_option(cmdoptions.no_build_isolation())
-        cmd_opts.add_option(cmdoptions.use_pep517())
-        cmd_opts.add_option(cmdoptions.no_use_pep517())
-
-        cmd_opts.add_option(cmdoptions.install_options())
-        cmd_opts.add_option(cmdoptions.global_options())
-
-        cmd_opts.add_option(
-            "--compile",
-            action="store_true",
-            dest="compile",
-            default=True,
-            help="Compile Python source files to bytecode",
-        )
-
-        cmd_opts.add_option(
-            "--no-compile",
-            action="store_false",
-            dest="compile",
-            help="Do not compile Python source files to bytecode",
-        )
-
-        cmd_opts.add_option(
-            "--no-warn-script-location",
-            action="store_false",
-            dest="warn_script_location",
-            default=True,
-            help="Do not warn when installing scripts outside PATH",
-        )
-        cmd_opts.add_option(
-            "--no-warn-conflicts",
-            action="store_false",
-            dest="warn_about_conflicts",
-            default=True,
-            help="Do not warn about broken dependencies",
-        )
-
-        cmd_opts.add_option(cmdoptions.no_binary())
-        cmd_opts.add_option(cmdoptions.only_binary())
-        cmd_opts.add_option(cmdoptions.prefer_binary())
-        cmd_opts.add_option(cmdoptions.no_clean())
-        cmd_opts.add_option(cmdoptions.require_hashes())
-        cmd_opts.add_option(cmdoptions.progress_bar())
-
-        index_opts = cmdoptions.make_option_group(
-            cmdoptions.index_group,
-            self.parser,
-        )
-
-        self.parser.insert_option_group(0, index_opts)
-        self.parser.insert_option_group(0, cmd_opts)
-
-    def run(self, options, args):
-        # type: (Values, List[Any]) -> int
-        cmdoptions.check_install_build_global(options)
-        upgrade_strategy = "to-satisfy-only"
-        if options.upgrade:
-            upgrade_strategy = options.upgrade_strategy
-
-        if options.build_dir:
-            options.build_dir = os.path.abspath(options.build_dir)
-
-        cmdoptions.check_dist_restriction(options, check_target=True)
-
-        options.src_dir = os.path.abspath(options.src_dir)
-        install_options = options.install_options or []
-        if options.use_user_site:
-            if options.prefix_path:
-                raise CommandError(
-                    "Can not combine '--user' and '--prefix' as they imply "
-                    "different installation locations"
-                )
-            if virtualenv_no_global():
-                raise InstallationError(
-                    "Can not perform a '--user' install. User site-packages "
-                    "are not visible in this virtualenv."
-                )
-            install_options.append('--user')
-            install_options.append('--prefix=')
-
-        target_temp_dir = None  # type: Optional[TempDirectory]
-        target_temp_dir_path = None  # type: Optional[str]
-        if options.target_dir:
-            options.ignore_installed = True
-            options.target_dir = os.path.abspath(options.target_dir)
-            if (os.path.exists(options.target_dir) and not
-                    os.path.isdir(options.target_dir)):
-                raise CommandError(
-                    "Target path exists but is not a directory, will not "
-                    "continue."
-                )
-
-            # Create a target directory for using with the target option
-            target_temp_dir = TempDirectory(kind="target")
-            target_temp_dir_path = target_temp_dir.path
-            install_options.append('--home=' + target_temp_dir_path)
-
-        global_options = options.global_options or []
-
-        session = self.get_default_session(options)
-
-        target_python = make_target_python(options)
-        finder = self._build_package_finder(
-            options=options,
-            session=session,
-            target_python=target_python,
-            ignore_requires_python=options.ignore_requires_python,
-        )
-        build_delete = (not (options.no_clean or options.build_dir))
-        wheel_cache = WheelCache(options.cache_dir, options.format_control)
-
-        if options.cache_dir and not check_path_owner(options.cache_dir):
-            logger.warning(
-                "The directory '%s' or its parent directory is not owned "
-                "by the current user and caching wheels has been "
-                "disabled. check the permissions and owner of that "
-                "directory. If executing pip with sudo, you may want "
-                "sudo's -H flag.",
-                options.cache_dir,
-            )
-            options.cache_dir = None
-
-        with RequirementTracker() as req_tracker, TempDirectory(
-            options.build_dir, delete=build_delete, kind="install"
-        ) as directory:
-            requirement_set = RequirementSet(
-                require_hashes=options.require_hashes,
-                check_supported_wheels=not options.target_dir,
-            )
-
-            try:
-                self.populate_requirement_set(
-                    requirement_set, args, options, finder, session,
-                    wheel_cache
-                )
-                preparer = self.make_requirement_preparer(
-                    temp_build_dir=directory,
-                    options=options,
-                    req_tracker=req_tracker,
-                )
-                resolver = self.make_resolver(
-                    preparer=preparer,
-                    finder=finder,
-                    session=session,
-                    options=options,
-                    wheel_cache=wheel_cache,
-                    use_user_site=options.use_user_site,
-                    ignore_installed=options.ignore_installed,
-                    ignore_requires_python=options.ignore_requires_python,
-                    force_reinstall=options.force_reinstall,
-                    upgrade_strategy=upgrade_strategy,
-                    use_pep517=options.use_pep517,
-                )
-                resolver.resolve(requirement_set)
-
-                try:
-                    pip_req = requirement_set.get_requirement("pip")
-                except KeyError:
-                    modifying_pip = None
-                else:
-                    # If we're not replacing an already installed pip,
-                    # we're not modifying it.
-                    modifying_pip = pip_req.satisfied_by is None
-                protect_pip_from_modification_on_windows(
-                    modifying_pip=modifying_pip
-                )
-
-                check_binary_allowed = get_check_binary_allowed(
-                    finder.format_control
-                )
-                # Consider legacy and PEP517-using requirements separately
-                legacy_requirements = []
-                pep517_requirements = []
-                for req in requirement_set.requirements.values():
-                    if req.use_pep517:
-                        pep517_requirements.append(req)
-                    else:
-                        legacy_requirements.append(req)
-
-                wheel_builder = WheelBuilder(
-                    preparer, wheel_cache,
-                    build_options=[], global_options=[],
-                    check_binary_allowed=check_binary_allowed,
-                )
-
-                build_failures = build_wheels(
-                    builder=wheel_builder,
-                    pep517_requirements=pep517_requirements,
-                    legacy_requirements=legacy_requirements,
-                )
-
-                # If we're using PEP 517, we cannot do a direct install
-                # so we fail here.
-                if build_failures:
-                    raise InstallationError(
-                        "Could not build wheels for {} which use"
-                        " PEP 517 and cannot be installed directly".format(
-                            ", ".join(r.name for r in build_failures)))
-
-                to_install = resolver.get_installation_order(
-                    requirement_set
-                )
-
-                # Consistency Checking of the package set we're installing.
-                should_warn_about_conflicts = (
-                    not options.ignore_dependencies and
-                    options.warn_about_conflicts
-                )
-                if should_warn_about_conflicts:
-                    self._warn_about_conflicts(to_install)
-
-                # Don't warn about script install locations if
-                # --target has been specified
-                warn_script_location = options.warn_script_location
-                if options.target_dir:
-                    warn_script_location = False
-
-                installed = install_given_reqs(
-                    to_install,
-                    install_options,
-                    global_options,
-                    root=options.root_path,
-                    home=target_temp_dir_path,
-                    prefix=options.prefix_path,
-                    pycompile=options.compile,
-                    warn_script_location=warn_script_location,
-                    use_user_site=options.use_user_site,
-                )
-
-                lib_locations = get_lib_location_guesses(
-                    user=options.use_user_site,
-                    home=target_temp_dir_path,
-                    root=options.root_path,
-                    prefix=options.prefix_path,
-                    isolated=options.isolated_mode,
-                )
-                working_set = pkg_resources.WorkingSet(lib_locations)
-
-                reqs = sorted(installed, key=operator.attrgetter('name'))
-                items = []
-                for req in reqs:
-                    item = req.name
-                    try:
-                        installed_version = get_installed_version(
-                            req.name, working_set=working_set
-                        )
-                        if installed_version:
-                            item += '-' + installed_version
-                    except Exception:
-                        pass
-                    items.append(item)
-                installed_desc = ' '.join(items)
-                if installed_desc:
-                    write_output(
-                        'Successfully installed %s', installed_desc,
-                    )
-            except EnvironmentError as error:
-                show_traceback = (self.verbosity >= 1)
-
-                message = create_env_error_message(
-                    error, show_traceback, options.use_user_site,
-                )
-                logger.error(message, exc_info=show_traceback)
-
-                return ERROR
-            except PreviousBuildDirError:
-                options.no_clean = True
-                raise
-            finally:
-                # Clean up
-                if not options.no_clean:
-                    requirement_set.cleanup_files()
-                    wheel_cache.cleanup()
-
-        if options.target_dir:
-            self._handle_target_dir(
-                options.target_dir, target_temp_dir, options.upgrade
-            )
-
-        return SUCCESS
-
-    def _handle_target_dir(self, target_dir, target_temp_dir, upgrade):
-        ensure_dir(target_dir)
-
-        # Checking both purelib and platlib directories for installed
-        # packages to be moved to target directory
-        lib_dir_list = []
-
-        with target_temp_dir:
-            # Checking both purelib and platlib directories for installed
-            # packages to be moved to target directory
-            scheme = distutils_scheme('', home=target_temp_dir.path)
-            purelib_dir = scheme['purelib']
-            platlib_dir = scheme['platlib']
-            data_dir = scheme['data']
-
-            if os.path.exists(purelib_dir):
-                lib_dir_list.append(purelib_dir)
-            if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
-                lib_dir_list.append(platlib_dir)
-            if os.path.exists(data_dir):
-                lib_dir_list.append(data_dir)
-
-            for lib_dir in lib_dir_list:
-                for item in os.listdir(lib_dir):
-                    if lib_dir == data_dir:
-                        ddir = os.path.join(data_dir, item)
-                        if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
-                            continue
-                    target_item_dir = os.path.join(target_dir, item)
-                    if os.path.exists(target_item_dir):
-                        if not upgrade:
-                            logger.warning(
-                                'Target directory %s already exists. Specify '
-                                '--upgrade to force replacement.',
-                                target_item_dir
-                            )
-                            continue
-                        if os.path.islink(target_item_dir):
-                            logger.warning(
-                                'Target directory %s already exists and is '
-                                'a link. Pip will not automatically replace '
-                                'links, please remove if replacement is '
-                                'desired.',
-                                target_item_dir
-                            )
-                            continue
-                        if os.path.isdir(target_item_dir):
-                            shutil.rmtree(target_item_dir)
-                        else:
-                            os.remove(target_item_dir)
-
-                    shutil.move(
-                        os.path.join(lib_dir, item),
-                        target_item_dir
-                    )
-
-    def _warn_about_conflicts(self, to_install):
-        try:
-            package_set, _dep_info = check_install_conflicts(to_install)
-        except Exception:
-            logger.error("Error checking for conflicts.", exc_info=True)
-            return
-        missing, conflicting = _dep_info
-
-        # NOTE: There is some duplication here from pip check
-        for project_name in missing:
-            version = package_set[project_name][0]
-            for dependency in missing[project_name]:
-                logger.critical(
-                    "%s %s requires %s, which is not installed.",
-                    project_name, version, dependency[1],
-                )
-
-        for project_name in conflicting:
-            version = package_set[project_name][0]
-            for dep_name, dep_version, req in conflicting[project_name]:
-                logger.critical(
-                    "%s %s has requirement %s, but you'll have %s %s which is "
-                    "incompatible.",
-                    project_name, version, req, dep_name, dep_version,
-                )
-
-
-def get_lib_location_guesses(*args, **kwargs):
-    scheme = distutils_scheme('', *args, **kwargs)
-    return [scheme['purelib'], scheme['platlib']]
-
-
-def create_env_error_message(error, show_traceback, using_user_site):
-    """Format an error message for an EnvironmentError
-
-    It may occur anytime during the execution of the install command.
-    """
-    parts = []
-
-    # Mention the error if we are not going to show a traceback
-    parts.append("Could not install packages due to an EnvironmentError")
-    if not show_traceback:
-        parts.append(": ")
-        parts.append(str(error))
-    else:
-        parts.append(".")
-
-    # Split the error indication from a helper message (if any)
-    parts[-1] += "\n"
-
-    # Suggest useful actions to the user:
-    #  (1) using user site-packages or (2) verifying the permissions
-    if error.errno == errno.EACCES:
-        user_option_part = "Consider using the `--user` option"
-        permissions_part = "Check the permissions"
-
-        if not using_user_site:
-            parts.extend([
-                user_option_part, " or ",
-                permissions_part.lower(),
-            ])
-        else:
-            parts.append(permissions_part)
-        parts.append(".\n")
-
-    return "".join(parts).strip() + "\n"
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/list.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/list.py
deleted file mode 100644
index 77a245b..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/list.py
+++ /dev/null
@@ -1,313 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import json
-import logging
-
-from pip._vendor import six
-from pip._vendor.six.moves import zip_longest
-
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.req_command import IndexGroupCommand
-from pip._internal.exceptions import CommandError
-from pip._internal.index import PackageFinder
-from pip._internal.models.selection_prefs import SelectionPreferences
-from pip._internal.self_outdated_check import make_link_collector
-from pip._internal.utils.misc import (
-    dist_is_editable,
-    get_installed_distributions,
-    write_output,
-)
-from pip._internal.utils.packaging import get_installer
-
-logger = logging.getLogger(__name__)
-
-
-class ListCommand(IndexGroupCommand):
-    """
-    List installed packages, including editables.
-
-    Packages are listed in a case-insensitive sorted order.
-    """
-
-    usage = """
-      %prog [options]"""
-
-    def __init__(self, *args, **kw):
-        super(ListCommand, self).__init__(*args, **kw)
-
-        cmd_opts = self.cmd_opts
-
-        cmd_opts.add_option(
-            '-o', '--outdated',
-            action='store_true',
-            default=False,
-            help='List outdated packages')
-        cmd_opts.add_option(
-            '-u', '--uptodate',
-            action='store_true',
-            default=False,
-            help='List uptodate packages')
-        cmd_opts.add_option(
-            '-e', '--editable',
-            action='store_true',
-            default=False,
-            help='List editable projects.')
-        cmd_opts.add_option(
-            '-l', '--local',
-            action='store_true',
-            default=False,
-            help=('If in a virtualenv that has global access, do not list '
-                  'globally-installed packages.'),
-        )
-        self.cmd_opts.add_option(
-            '--user',
-            dest='user',
-            action='store_true',
-            default=False,
-            help='Only output packages installed in user-site.')
-        cmd_opts.add_option(cmdoptions.list_path())
-        cmd_opts.add_option(
-            '--pre',
-            action='store_true',
-            default=False,
-            help=("Include pre-release and development versions. By default, "
-                  "pip only finds stable versions."),
-        )
-
-        cmd_opts.add_option(
-            '--format',
-            action='store',
-            dest='list_format',
-            default="columns",
-            choices=('columns', 'freeze', 'json'),
-            help="Select the output format among: columns (default), freeze, "
-                 "or json",
-        )
-
-        cmd_opts.add_option(
-            '--not-required',
-            action='store_true',
-            dest='not_required',
-            help="List packages that are not dependencies of "
-                 "installed packages.",
-        )
-
-        cmd_opts.add_option(
-            '--exclude-editable',
-            action='store_false',
-            dest='include_editable',
-            help='Exclude editable packages from output.',
-        )
-        cmd_opts.add_option(
-            '--include-editable',
-            action='store_true',
-            dest='include_editable',
-            help='Include editable packages in output.',
-            default=True,
-        )
-        index_opts = cmdoptions.make_option_group(
-            cmdoptions.index_group, self.parser
-        )
-
-        self.parser.insert_option_group(0, index_opts)
-        self.parser.insert_option_group(0, cmd_opts)
-
-    def _build_package_finder(self, options, session):
-        """
-        Create a package finder appropriate to this list command.
-        """
-        link_collector = make_link_collector(session, options=options)
-
-        # Pass allow_yanked=False to ignore yanked versions.
-        selection_prefs = SelectionPreferences(
-            allow_yanked=False,
-            allow_all_prereleases=options.pre,
-        )
-
-        return PackageFinder.create(
-            link_collector=link_collector,
-            selection_prefs=selection_prefs,
-        )
-
-    def run(self, options, args):
-        if options.outdated and options.uptodate:
-            raise CommandError(
-                "Options --outdated and --uptodate cannot be combined.")
-
-        cmdoptions.check_list_path_option(options)
-
-        packages = get_installed_distributions(
-            local_only=options.local,
-            user_only=options.user,
-            editables_only=options.editable,
-            include_editables=options.include_editable,
-            paths=options.path,
-        )
-
-        # get_not_required must be called first in order to find and
-        # filter out all dependencies correctly. Otherwise a package
-        # can't be identified as a requirement because some parent
-        # packages could have been filtered out earlier.
-        if options.not_required:
-            packages = self.get_not_required(packages, options)
-
-        if options.outdated:
-            packages = self.get_outdated(packages, options)
-        elif options.uptodate:
-            packages = self.get_uptodate(packages, options)
-
-        self.output_package_listing(packages, options)
-
-    def get_outdated(self, packages, options):
-        return [
-            dist for dist in self.iter_packages_latest_infos(packages, options)
-            if dist.latest_version > dist.parsed_version
-        ]
-
-    def get_uptodate(self, packages, options):
-        return [
-            dist for dist in self.iter_packages_latest_infos(packages, options)
-            if dist.latest_version == dist.parsed_version
-        ]
-
-    def get_not_required(self, packages, options):
-        dep_keys = set()
-        for dist in packages:
-            dep_keys.update(requirement.key for requirement in dist.requires())
-        return {pkg for pkg in packages if pkg.key not in dep_keys}
-
-    def iter_packages_latest_infos(self, packages, options):
-        with self._build_session(options) as session:
-            finder = self._build_package_finder(options, session)
-
-            for dist in packages:
-                typ = 'unknown'
-                all_candidates = finder.find_all_candidates(dist.key)
-                if not options.pre:
-                    # Remove prereleases
-                    all_candidates = [candidate for candidate in all_candidates
-                                      if not candidate.version.is_prerelease]
-
-                evaluator = finder.make_candidate_evaluator(
-                    project_name=dist.project_name,
-                )
-                best_candidate = evaluator.sort_best_candidate(all_candidates)
-                if best_candidate is None:
-                    continue
-
-                remote_version = best_candidate.version
-                if best_candidate.link.is_wheel:
-                    typ = 'wheel'
-                else:
-                    typ = 'sdist'
-                # This is dirty but makes the rest of the code much cleaner
-                dist.latest_version = remote_version
-                dist.latest_filetype = typ
-                yield dist
-
-    def output_package_listing(self, packages, options):
-        packages = sorted(
-            packages,
-            key=lambda dist: dist.project_name.lower(),
-        )
-        if options.list_format == 'columns' and packages:
-            data, header = format_for_columns(packages, options)
-            self.output_package_listing_columns(data, header)
-        elif options.list_format == 'freeze':
-            for dist in packages:
-                if options.verbose >= 1:
-                    write_output("%s==%s (%s)", dist.project_name,
-                                 dist.version, dist.location)
-                else:
-                    write_output("%s==%s", dist.project_name, dist.version)
-        elif options.list_format == 'json':
-            write_output(format_for_json(packages, options))
-
-    def output_package_listing_columns(self, data, header):
-        # insert the header first: we need to know the size of column names
-        if len(data) > 0:
-            data.insert(0, header)
-
-        pkg_strings, sizes = tabulate(data)
-
-        # Create and add a separator.
-        if len(data) > 0:
-            pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))
-
-        for val in pkg_strings:
-            write_output(val)
-
-
-def tabulate(vals):
-    # From pfmoore on GitHub:
-    # https://github.com/pypa/pip/issues/3651#issuecomment-216932564
-    assert len(vals) > 0
-
-    sizes = [0] * max(len(x) for x in vals)
-    for row in vals:
-        sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)]
-
-    result = []
-    for row in vals:
-        display = " ".join([str(c).ljust(s) if c is not None else ''
-                            for s, c in zip_longest(sizes, row)])
-        result.append(display)
-
-    return result, sizes
-
-
-def format_for_columns(pkgs, options):
-    """
-    Convert the package data into something usable
-    by output_package_listing_columns.
-    """
-    running_outdated = options.outdated
-    # Adjust the header for the `pip list --outdated` case.
-    if running_outdated:
-        header = ["Package", "Version", "Latest", "Type"]
-    else:
-        header = ["Package", "Version"]
-
-    data = []
-    if options.verbose >= 1 or any(dist_is_editable(x) for x in pkgs):
-        header.append("Location")
-    if options.verbose >= 1:
-        header.append("Installer")
-
-    for proj in pkgs:
-        # if we're working on the 'outdated' list, separate out the
-        # latest_version and type
-        row = [proj.project_name, proj.version]
-
-        if running_outdated:
-            row.append(proj.latest_version)
-            row.append(proj.latest_filetype)
-
-        if options.verbose >= 1 or dist_is_editable(proj):
-            row.append(proj.location)
-        if options.verbose >= 1:
-            row.append(get_installer(proj))
-
-        data.append(row)
-
-    return data, header
-
-
-def format_for_json(packages, options):
-    data = []
-    for dist in packages:
-        info = {
-            'name': dist.project_name,
-            'version': six.text_type(dist.version),
-        }
-        if options.verbose >= 1:
-            info['location'] = dist.location
-            info['installer'] = get_installer(dist)
-        if options.outdated:
-            info['latest_version'] = six.text_type(dist.latest_version)
-            info['latest_filetype'] = dist.latest_filetype
-        data.append(info)
-    return json.dumps(data)
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/search.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/search.py
deleted file mode 100644
index 2e880ee..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/search.py
+++ /dev/null
@@ -1,145 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import logging
-import sys
-import textwrap
-from collections import OrderedDict
-
-from pip._vendor import pkg_resources
-from pip._vendor.packaging.version import parse as parse_version
-# NOTE: XMLRPC Client is not annotated in typeshed as of 2017-07-17, which is
-#       why we ignore the type on this import
-from pip._vendor.six.moves import xmlrpc_client  # type: ignore
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.req_command import SessionCommandMixin
-from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
-from pip._internal.exceptions import CommandError
-from pip._internal.models.index import PyPI
-from pip._internal.network.xmlrpc import PipXmlrpcTransport
-from pip._internal.utils.compat import get_terminal_size
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import write_output
-
-logger = logging.getLogger(__name__)
-
-
-class SearchCommand(Command, SessionCommandMixin):
-    """Search for PyPI packages whose name or summary contains <query>."""
-
-    usage = """
-      %prog [options] <query>"""
-    ignore_require_venv = True
-
-    def __init__(self, *args, **kw):
-        super(SearchCommand, self).__init__(*args, **kw)
-        self.cmd_opts.add_option(
-            '-i', '--index',
-            dest='index',
-            metavar='URL',
-            default=PyPI.pypi_url,
-            help='Base URL of Python Package Index (default %default)')
-
-        self.parser.insert_option_group(0, self.cmd_opts)
-
-    def run(self, options, args):
-        if not args:
-            raise CommandError('Missing required argument (search query).')
-        query = args
-        pypi_hits = self.search(query, options)
-        hits = transform_hits(pypi_hits)
-
-        terminal_width = None
-        if sys.stdout.isatty():
-            terminal_width = get_terminal_size()[0]
-
-        print_results(hits, terminal_width=terminal_width)
-        if pypi_hits:
-            return SUCCESS
-        return NO_MATCHES_FOUND
-
-    def search(self, query, options):
-        index_url = options.index
-
-        session = self.get_default_session(options)
-
-        transport = PipXmlrpcTransport(index_url, session)
-        pypi = xmlrpc_client.ServerProxy(index_url, transport)
-        hits = pypi.search({'name': query, 'summary': query}, 'or')
-        return hits
-
-
-def transform_hits(hits):
-    """
-    The list from pypi is really a list of versions. We want a list of
-    packages with the list of versions stored inline. This converts the
-    list from pypi into one we can use.
-    """
-    packages = OrderedDict()
-    for hit in hits:
-        name = hit['name']
-        summary = hit['summary']
-        version = hit['version']
-
-        if name not in packages.keys():
-            packages[name] = {
-                'name': name,
-                'summary': summary,
-                'versions': [version],
-            }
-        else:
-            packages[name]['versions'].append(version)
-
-            # if this is the highest version, replace summary and score
-            if version == highest_version(packages[name]['versions']):
-                packages[name]['summary'] = summary
-
-    return list(packages.values())
-
-
-def print_results(hits, name_column_width=None, terminal_width=None):
-    if not hits:
-        return
-    if name_column_width is None:
-        name_column_width = max([
-            len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
-            for hit in hits
-        ]) + 4
-
-    installed_packages = [p.project_name for p in pkg_resources.working_set]
-    for hit in hits:
-        name = hit['name']
-        summary = hit['summary'] or ''
-        latest = highest_version(hit.get('versions', ['-']))
-        if terminal_width is not None:
-            target_width = terminal_width - name_column_width - 5
-            if target_width > 10:
-                # wrap and indent summary to fit terminal
-                summary = textwrap.wrap(summary, target_width)
-                summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
-
-        line = '%-*s - %s' % (name_column_width,
-                              '%s (%s)' % (name, latest), summary)
-        try:
-            write_output(line)
-            if name in installed_packages:
-                dist = pkg_resources.get_distribution(name)
-                with indent_log():
-                    if dist.version == latest:
-                        write_output('INSTALLED: %s (latest)', dist.version)
-                    else:
-                        write_output('INSTALLED: %s', dist.version)
-                        if parse_version(latest).pre:
-                            write_output('LATEST:    %s (pre-release; install'
-                                         ' with "pip install --pre")', latest)
-                        else:
-                            write_output('LATEST:    %s', latest)
-        except UnicodeEncodeError:
-            pass
-
-
-def highest_version(versions):
-    return max(versions, key=parse_version)
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/show.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/show.py
deleted file mode 100644
index a46b08e..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/show.py
+++ /dev/null
@@ -1,180 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import logging
-import os
-from email.parser import FeedParser
-
-from pip._vendor import pkg_resources
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.status_codes import ERROR, SUCCESS
-from pip._internal.utils.misc import write_output
-
-logger = logging.getLogger(__name__)
-
-
-class ShowCommand(Command):
-    """
-    Show information about one or more installed packages.
-
-    The output is in RFC-compliant mail header format.
-    """
-
-    usage = """
-      %prog [options] <package> ..."""
-    ignore_require_venv = True
-
-    def __init__(self, *args, **kw):
-        super(ShowCommand, self).__init__(*args, **kw)
-        self.cmd_opts.add_option(
-            '-f', '--files',
-            dest='files',
-            action='store_true',
-            default=False,
-            help='Show the full list of installed files for each package.')
-
-        self.parser.insert_option_group(0, self.cmd_opts)
-
-    def run(self, options, args):
-        if not args:
-            logger.warning('ERROR: Please provide a package name or names.')
-            return ERROR
-        query = args
-
-        results = search_packages_info(query)
-        if not print_results(
-                results, list_files=options.files, verbose=options.verbose):
-            return ERROR
-        return SUCCESS
-
-
-def search_packages_info(query):
-    """
-    Gather details from installed distributions. Print distribution name,
-    version, location, and installed files. Listing installed files requires a
-    pip-generated 'installed-files.txt' in the distribution's '.egg-info'
-    directory.
-    """
-    installed = {}
-    for p in pkg_resources.working_set:
-        installed[canonicalize_name(p.project_name)] = p
-
-    query_names = [canonicalize_name(name) for name in query]
-    missing = sorted(
-        [name for name, pkg in zip(query, query_names) if pkg not in installed]
-    )
-    if missing:
-        logger.warning('Package(s) not found: %s', ', '.join(missing))
-
-    def get_requiring_packages(package_name):
-        canonical_name = canonicalize_name(package_name)
-        return [
-            pkg.project_name for pkg in pkg_resources.working_set
-            if canonical_name in
-               [canonicalize_name(required.name) for required in
-                pkg.requires()]
-        ]
-
-    for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
-        package = {
-            'name': dist.project_name,
-            'version': dist.version,
-            'location': dist.location,
-            'requires': [dep.project_name for dep in dist.requires()],
-            'required_by': get_requiring_packages(dist.project_name)
-        }
-        file_list = None
-        metadata = None
-        if isinstance(dist, pkg_resources.DistInfoDistribution):
-            # RECORDs should be part of .dist-info metadata
-            if dist.has_metadata('RECORD'):
-                lines = dist.get_metadata_lines('RECORD')
-                paths = [l.split(',')[0] for l in lines]
-                paths = [os.path.join(dist.location, p) for p in paths]
-                file_list = [os.path.relpath(p, dist.location) for p in paths]
-
-            if dist.has_metadata('METADATA'):
-                metadata = dist.get_metadata('METADATA')
-        else:
-            # Otherwise use pip's log for .egg-info's
-            if dist.has_metadata('installed-files.txt'):
-                paths = dist.get_metadata_lines('installed-files.txt')
-                paths = [os.path.join(dist.egg_info, p) for p in paths]
-                file_list = [os.path.relpath(p, dist.location) for p in paths]
-
-            if dist.has_metadata('PKG-INFO'):
-                metadata = dist.get_metadata('PKG-INFO')
-
-        if dist.has_metadata('entry_points.txt'):
-            entry_points = dist.get_metadata_lines('entry_points.txt')
-            package['entry_points'] = entry_points
-
-        if dist.has_metadata('INSTALLER'):
-            for line in dist.get_metadata_lines('INSTALLER'):
-                if line.strip():
-                    package['installer'] = line.strip()
-                    break
-
-        # @todo: Should pkg_resources.Distribution have a
-        # `get_pkg_info` method?
-        feed_parser = FeedParser()
-        feed_parser.feed(metadata)
-        pkg_info_dict = feed_parser.close()
-        for key in ('metadata-version', 'summary',
-                    'home-page', 'author', 'author-email', 'license'):
-            package[key] = pkg_info_dict.get(key)
-
-        # It looks like FeedParser cannot deal with repeated headers
-        classifiers = []
-        for line in metadata.splitlines():
-            if line.startswith('Classifier: '):
-                classifiers.append(line[len('Classifier: '):])
-        package['classifiers'] = classifiers
-
-        if file_list:
-            package['files'] = sorted(file_list)
-        yield package
-
-
-def print_results(distributions, list_files=False, verbose=False):
-    """
-    Print the information from the installed distributions found.
-    """
-    results_printed = False
-    for i, dist in enumerate(distributions):
-        results_printed = True
-        if i > 0:
-            write_output("---")
-
-        write_output("Name: %s", dist.get('name', ''))
-        write_output("Version: %s", dist.get('version', ''))
-        write_output("Summary: %s", dist.get('summary', ''))
-        write_output("Home-page: %s", dist.get('home-page', ''))
-        write_output("Author: %s", dist.get('author', ''))
-        write_output("Author-email: %s", dist.get('author-email', ''))
-        write_output("License: %s", dist.get('license', ''))
-        write_output("Location: %s", dist.get('location', ''))
-        write_output("Requires: %s", ', '.join(dist.get('requires', [])))
-        write_output("Required-by: %s", ', '.join(dist.get('required_by', [])))
-
-        if verbose:
-            write_output("Metadata-Version: %s",
-                         dist.get('metadata-version', ''))
-            write_output("Installer: %s", dist.get('installer', ''))
-            write_output("Classifiers:")
-            for classifier in dist.get('classifiers', []):
-                write_output("  %s", classifier)
-            write_output("Entry-points:")
-            for entry in dist.get('entry_points', []):
-                write_output("  %s", entry.strip())
-        if list_files:
-            write_output("Files:")
-            for line in dist.get('files', []):
-                write_output("  %s", line.strip())
-            if "files" not in dist:
-                write_output("Cannot locate installed-files.txt")
-    return results_printed
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/uninstall.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/uninstall.py
deleted file mode 100644
index 1bde414..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/uninstall.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.req_command import SessionCommandMixin
-from pip._internal.exceptions import InstallationError
-from pip._internal.req import parse_requirements
-from pip._internal.req.constructors import install_req_from_line
-from pip._internal.utils.misc import protect_pip_from_modification_on_windows
-
-
-class UninstallCommand(Command, SessionCommandMixin):
-    """
-    Uninstall packages.
-
-    pip is able to uninstall most installed packages. Known exceptions are:
-
-    - Pure distutils packages installed with ``python setup.py install``, which
-      leave behind no metadata to determine what files were installed.
-    - Script wrappers installed by ``python setup.py develop``.
-    """
-
-    usage = """
-      %prog [options] <package> ...
-      %prog [options] -r <requirements file> ..."""
-
-    def __init__(self, *args, **kw):
-        super(UninstallCommand, self).__init__(*args, **kw)
-        self.cmd_opts.add_option(
-            '-r', '--requirement',
-            dest='requirements',
-            action='append',
-            default=[],
-            metavar='file',
-            help='Uninstall all the packages listed in the given requirements '
-                 'file.  This option can be used multiple times.',
-        )
-        self.cmd_opts.add_option(
-            '-y', '--yes',
-            dest='yes',
-            action='store_true',
-            help="Don't ask for confirmation of uninstall deletions.")
-
-        self.parser.insert_option_group(0, self.cmd_opts)
-
-    def run(self, options, args):
-        session = self.get_default_session(options)
-
-        reqs_to_uninstall = {}
-        for name in args:
-            req = install_req_from_line(
-                name, isolated=options.isolated_mode,
-            )
-            if req.name:
-                reqs_to_uninstall[canonicalize_name(req.name)] = req
-        for filename in options.requirements:
-            for req in parse_requirements(
-                    filename,
-                    options=options,
-                    session=session):
-                if req.name:
-                    reqs_to_uninstall[canonicalize_name(req.name)] = req
-        if not reqs_to_uninstall:
-            raise InstallationError(
-                'You must give at least one requirement to %(name)s (see '
-                '"pip help %(name)s")' % dict(name=self.name)
-            )
-
-        protect_pip_from_modification_on_windows(
-            modifying_pip="pip" in reqs_to_uninstall
-        )
-
-        for req in reqs_to_uninstall.values():
-            uninstall_pathset = req.uninstall(
-                auto_confirm=options.yes, verbose=self.verbosity > 0,
-            )
-            if uninstall_pathset:
-                uninstall_pathset.commit()
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/wheel.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/wheel.py
deleted file mode 100644
index 7230470..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/commands/wheel.py
+++ /dev/null
@@ -1,180 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import logging
-import os
-
-from pip._internal.cache import WheelCache
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.req_command import RequirementCommand
-from pip._internal.exceptions import CommandError, PreviousBuildDirError
-from pip._internal.req import RequirementSet
-from pip._internal.req.req_tracker import RequirementTracker
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.wheel import WheelBuilder
-
-if MYPY_CHECK_RUNNING:
-    from optparse import Values
-    from typing import Any, List
-
-
-logger = logging.getLogger(__name__)
-
-
-class WheelCommand(RequirementCommand):
-    """
-    Build Wheel archives for your requirements and dependencies.
-
-    Wheel is a built-package format, and offers the advantage of not
-    recompiling your software during every install. For more details, see the
-    wheel docs: https://wheel.readthedocs.io/en/latest/
-
-    Requirements: setuptools>=0.8, and wheel.
-
-    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
-    package to build individual wheels.
-
-    """
-
-    usage = """
-      %prog [options] <requirement specifier> ...
-      %prog [options] -r <requirements file> ...
-      %prog [options] [-e] <vcs project url> ...
-      %prog [options] [-e] <local project path> ...
-      %prog [options] <archive url/path> ..."""
-
-    def __init__(self, *args, **kw):
-        super(WheelCommand, self).__init__(*args, **kw)
-
-        cmd_opts = self.cmd_opts
-
-        cmd_opts.add_option(
-            '-w', '--wheel-dir',
-            dest='wheel_dir',
-            metavar='dir',
-            default=os.curdir,
-            help=("Build wheels into <dir>, where the default is the "
-                  "current working directory."),
-        )
-        cmd_opts.add_option(cmdoptions.no_binary())
-        cmd_opts.add_option(cmdoptions.only_binary())
-        cmd_opts.add_option(cmdoptions.prefer_binary())
-        cmd_opts.add_option(
-            '--build-option',
-            dest='build_options',
-            metavar='options',
-            action='append',
-            help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
-        )
-        cmd_opts.add_option(cmdoptions.no_build_isolation())
-        cmd_opts.add_option(cmdoptions.use_pep517())
-        cmd_opts.add_option(cmdoptions.no_use_pep517())
-        cmd_opts.add_option(cmdoptions.constraints())
-        cmd_opts.add_option(cmdoptions.editable())
-        cmd_opts.add_option(cmdoptions.requirements())
-        cmd_opts.add_option(cmdoptions.src())
-        cmd_opts.add_option(cmdoptions.ignore_requires_python())
-        cmd_opts.add_option(cmdoptions.no_deps())
-        cmd_opts.add_option(cmdoptions.build_dir())
-        cmd_opts.add_option(cmdoptions.progress_bar())
-
-        cmd_opts.add_option(
-            '--global-option',
-            dest='global_options',
-            action='append',
-            metavar='options',
-            help="Extra global options to be supplied to the setup.py "
-            "call before the 'bdist_wheel' command.")
-
-        cmd_opts.add_option(
-            '--pre',
-            action='store_true',
-            default=False,
-            help=("Include pre-release and development versions. By default, "
-                  "pip only finds stable versions."),
-        )
-
-        cmd_opts.add_option(cmdoptions.no_clean())
-        cmd_opts.add_option(cmdoptions.require_hashes())
-
-        index_opts = cmdoptions.make_option_group(
-            cmdoptions.index_group,
-            self.parser,
-        )
-
-        self.parser.insert_option_group(0, index_opts)
-        self.parser.insert_option_group(0, cmd_opts)
-
-    def run(self, options, args):
-        # type: (Values, List[Any]) -> None
-        cmdoptions.check_install_build_global(options)
-
-        if options.build_dir:
-            options.build_dir = os.path.abspath(options.build_dir)
-
-        options.src_dir = os.path.abspath(options.src_dir)
-
-        session = self.get_default_session(options)
-
-        finder = self._build_package_finder(options, session)
-        build_delete = (not (options.no_clean or options.build_dir))
-        wheel_cache = WheelCache(options.cache_dir, options.format_control)
-
-        with RequirementTracker() as req_tracker, TempDirectory(
-            options.build_dir, delete=build_delete, kind="wheel"
-        ) as directory:
-
-            requirement_set = RequirementSet(
-                require_hashes=options.require_hashes,
-            )
-
-            try:
-                self.populate_requirement_set(
-                    requirement_set, args, options, finder, session,
-                    wheel_cache
-                )
-
-                preparer = self.make_requirement_preparer(
-                    temp_build_dir=directory,
-                    options=options,
-                    req_tracker=req_tracker,
-                    wheel_download_dir=options.wheel_dir,
-                )
-
-                resolver = self.make_resolver(
-                    preparer=preparer,
-                    finder=finder,
-                    session=session,
-                    options=options,
-                    wheel_cache=wheel_cache,
-                    ignore_requires_python=options.ignore_requires_python,
-                    use_pep517=options.use_pep517,
-                )
-                resolver.resolve(requirement_set)
-
-                # build wheels
-                wb = WheelBuilder(
-                    preparer, wheel_cache,
-                    build_options=options.build_options or [],
-                    global_options=options.global_options or [],
-                    no_clean=options.no_clean,
-                )
-                build_failures = wb.build(
-                    requirement_set.requirements.values(),
-                )
-                if len(build_failures) != 0:
-                    raise CommandError(
-                        "Failed to build one or more wheels"
-                    )
-            except PreviousBuildDirError:
-                options.no_clean = True
-                raise
-            finally:
-                if not options.no_clean:
-                    requirement_set.cleanup_files()
-                    wheel_cache.cleanup()
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/configuration.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/configuration.py
deleted file mode 100644
index 858c660..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/configuration.py
+++ /dev/null
@@ -1,422 +0,0 @@
-"""Configuration management setup
-
-Some terminology:
-- name
-  As written in config files.
-- value
-  Value associated with a name
-- key
-  Name combined with its section (section.name)
-- variant
-  A single word describing where the configuration key-value pair came from
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
-
-import locale
-import logging
-import os
-import sys
-
-from pip._vendor.six.moves import configparser
-
-from pip._internal.exceptions import (
-    ConfigurationError,
-    ConfigurationFileCouldNotBeLoaded,
-)
-from pip._internal.utils import appdirs
-from pip._internal.utils.compat import WINDOWS, expanduser
-from pip._internal.utils.misc import ensure_dir, enum
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import (
-        Any, Dict, Iterable, List, NewType, Optional, Tuple
-    )
-
-    RawConfigParser = configparser.RawConfigParser  # Shorthand
-    Kind = NewType("Kind", str)
-
-logger = logging.getLogger(__name__)
-
-
-# NOTE: Maybe use the optionxform attribute to normalize key names.
-def _normalize_name(name):
-    # type: (str) -> str
-    """Make a name consistent regardless of source (environment or file)
-    """
-    name = name.lower().replace('_', '-')
-    if name.startswith('--'):
-        name = name[2:]  # only prefer long opts
-    return name
-
-
-def _disassemble_key(name):
-    # type: (str) -> List[str]
-    if "." not in name:
-        error_message = (
-            "Key does not contain dot separated section and key. "
-            "Perhaps you wanted to use 'global.{}' instead?"
-        ).format(name)
-        raise ConfigurationError(error_message)
-    return name.split(".", 1)
-
-
-# The kinds of configurations there are.
-kinds = enum(
-    USER="user",        # User Specific
-    GLOBAL="global",    # System Wide
-    SITE="site",        # [Virtual] Environment Specific
-    ENV="env",          # from PIP_CONFIG_FILE
-    ENV_VAR="env-var",  # from Environment Variables
-)
-
-
-CONFIG_BASENAME = 'pip.ini' if WINDOWS else 'pip.conf'
-
-
-def get_configuration_files():
-    global_config_files = [
-        os.path.join(path, CONFIG_BASENAME)
-        for path in appdirs.site_config_dirs('pip')
-    ]
-
-    site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME)
-    legacy_config_file = os.path.join(
-        expanduser('~'),
-        'pip' if WINDOWS else '.pip',
-        CONFIG_BASENAME,
-    )
-    new_config_file = os.path.join(
-        appdirs.user_config_dir("pip"), CONFIG_BASENAME
-    )
-    return {
-        kinds.GLOBAL: global_config_files,
-        kinds.SITE: [site_config_file],
-        kinds.USER: [legacy_config_file, new_config_file],
-    }
-
-
-class Configuration(object):
-    """Handles management of configuration.
-
-    Provides an interface to accessing and managing configuration files.
-
-    This class provides an API that takes "section.key-name" style
-    keys and stores the value associated with it as "key-name" under the
-    section "section".
-
-    This allows for a clean interface wherein both the section and the
-    key-name are preserved in an easy-to-manage form in the configuration
-    files, while the stored data stays simple.
-    """
-
-    def __init__(self, isolated, load_only=None):
-        # type: (bool, Kind) -> None
-        super(Configuration, self).__init__()
-
-        _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.SITE, None]
-        if load_only not in _valid_load_only:
-            raise ConfigurationError(
-                "Got invalid value for load_only - should be one of {}".format(
-                    ", ".join(map(repr, _valid_load_only[:-1]))
-                )
-            )
-        self.isolated = isolated  # type: bool
-        self.load_only = load_only  # type: Optional[Kind]
-
-        # The order here determines the override order.
-        self._override_order = [
-            kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
-        ]
-
-        self._ignore_env_names = ["version", "help"]
-
-        # Because we keep track of where we got the data from
-        self._parsers = {
-            variant: [] for variant in self._override_order
-        }  # type: Dict[Kind, List[Tuple[str, RawConfigParser]]]
-        self._config = {
-            variant: {} for variant in self._override_order
-        }  # type: Dict[Kind, Dict[str, Any]]
-        self._modified_parsers = []  # type: List[Tuple[str, RawConfigParser]]
-
-    def load(self):
-        # type: () -> None
-        """Loads configuration from configuration files and environment
-        """
-        self._load_config_files()
-        if not self.isolated:
-            self._load_environment_vars()
-
-    def get_file_to_edit(self):
-        # type: () -> Optional[str]
-        """Returns the file with highest priority in configuration
-        """
-        assert self.load_only is not None, \
-            "Need a file to be specified for editing"
-
-        try:
-            return self._get_parser_to_modify()[0]
-        except IndexError:
-            return None
-
-    def items(self):
-        # type: () -> Iterable[Tuple[str, Any]]
-        """Returns key-value pairs like dict.items() representing the loaded
-        configuration
-        """
-        return self._dictionary.items()
-
-    def get_value(self, key):
-        # type: (str) -> Any
-        """Get a value from the configuration.
-        """
-        try:
-            return self._dictionary[key]
-        except KeyError:
-            raise ConfigurationError("No such key - {}".format(key))
-
-    def set_value(self, key, value):
-        # type: (str, Any) -> None
-        """Modify a value in the configuration.
-        """
-        self._ensure_have_load_only()
-
-        fname, parser = self._get_parser_to_modify()
-
-        if parser is not None:
-            section, name = _disassemble_key(key)
-
-            # Modify the parser and the configuration
-            if not parser.has_section(section):
-                parser.add_section(section)
-            parser.set(section, name, value)
-
-        self._config[self.load_only][key] = value
-        self._mark_as_modified(fname, parser)
-
-    def unset_value(self, key):
-        # type: (str) -> None
-        """Unset a value in the configuration.
-        """
-        self._ensure_have_load_only()
-
-        if key not in self._config[self.load_only]:
-            raise ConfigurationError("No such key - {}".format(key))
-
-        fname, parser = self._get_parser_to_modify()
-
-        if parser is not None:
-            section, name = _disassemble_key(key)
-
-            # Remove the key in the parser
-            modified_something = False
-            if parser.has_section(section):
-                # Returns whether the option was removed or not
-                modified_something = parser.remove_option(section, name)
-
-            if modified_something:
-                # name removed from parser, section may now be empty
-                section_iter = iter(parser.items(section))
-                try:
-                    val = next(section_iter)
-                except StopIteration:
-                    val = None
-
-                if val is None:
-                    parser.remove_section(section)
-
-                self._mark_as_modified(fname, parser)
-            else:
-                raise ConfigurationError(
-                    "Fatal Internal error [id=1]. Please report as a bug."
-                )
-
-        del self._config[self.load_only][key]
-
-    def save(self):
-        # type: () -> None
-        """Save the current in-memory state.
-        """
-        self._ensure_have_load_only()
-
-        for fname, parser in self._modified_parsers:
-            logger.info("Writing to %s", fname)
-
-            # Ensure directory exists.
-            ensure_dir(os.path.dirname(fname))
-
-            with open(fname, "w") as f:
-                parser.write(f)
-
-    #
-    # Private routines
-    #
-
-    def _ensure_have_load_only(self):
-        # type: () -> None
-        if self.load_only is None:
-            raise ConfigurationError("Needed a specific file to modify.")
-        logger.debug("Will be working with %s variant only", self.load_only)
-
-    @property
-    def _dictionary(self):
-        # type: () -> Dict[str, Any]
-        """A dictionary representing the loaded configuration.
-        """
-        # NOTE: Dictionaries are not populated if not loaded. So, conditionals
-        #       are not needed here.
-        retval = {}
-
-        for variant in self._override_order:
-            retval.update(self._config[variant])
-
-        return retval
-
-    def _load_config_files(self):
-        # type: () -> None
-        """Loads configuration from configuration files
-        """
-        config_files = dict(self._iter_config_files())
-        if config_files[kinds.ENV][0:1] == [os.devnull]:
-            logger.debug(
-                "Skipping loading configuration files due to "
-                "environment's PIP_CONFIG_FILE being os.devnull"
-            )
-            return
-
-        for variant, files in config_files.items():
-            for fname in files:
-                # If there's a specific variant set in `load_only`, load only
-                # that variant, not the others.
-                if self.load_only is not None and variant != self.load_only:
-                    logger.debug(
-                        "Skipping file '%s' (variant: %s)", fname, variant
-                    )
-                    continue
-
-                parser = self._load_file(variant, fname)
-
-                # Keeping track of the parsers used
-                self._parsers[variant].append((fname, parser))
-
-    def _load_file(self, variant, fname):
-        # type: (Kind, str) -> RawConfigParser
-        logger.debug("For variant '%s', will try loading '%s'", variant, fname)
-        parser = self._construct_parser(fname)
-
-        for section in parser.sections():
-            items = parser.items(section)
-            self._config[variant].update(self._normalized_keys(section, items))
-
-        return parser
-
-    def _construct_parser(self, fname):
-        # type: (str) -> RawConfigParser
-        parser = configparser.RawConfigParser()
-        # If there is no such file, don't bother reading it but create the
-        # parser anyway, to hold the data.
-        # Doing this is useful when modifying and saving files, where we don't
-        # need to construct a parser.
-        if os.path.exists(fname):
-            try:
-                parser.read(fname)
-            except UnicodeDecodeError:
-                # See https://github.com/pypa/pip/issues/4963
-                raise ConfigurationFileCouldNotBeLoaded(
-                    reason="contains invalid {} characters".format(
-                        locale.getpreferredencoding(False)
-                    ),
-                    fname=fname,
-                )
-            except configparser.Error as error:
-                # See https://github.com/pypa/pip/issues/4893
-                raise ConfigurationFileCouldNotBeLoaded(error=error)
-        return parser
-
-    def _load_environment_vars(self):
-        # type: () -> None
-        """Loads configuration from environment variables
-        """
-        self._config[kinds.ENV_VAR].update(
-            self._normalized_keys(":env:", self._get_environ_vars())
-        )
-
-    def _normalized_keys(self, section, items):
-        # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any]
-        """Normalizes items to construct a dictionary with normalized keys.
-
-        This routine is where the names become keys and are made the same
-        regardless of source - configuration files or environment.
-        """
-        normalized = {}
-        for name, val in items:
-            key = section + "." + _normalize_name(name)
-            normalized[key] = val
-        return normalized
-
-    def _get_environ_vars(self):
-        # type: () -> Iterable[Tuple[str, str]]
-        """Returns a generator with all environment variables with the prefix PIP_"""
-        for key, val in os.environ.items():
-            should_be_yielded = (
-                key.startswith("PIP_") and
-                key[4:].lower() not in self._ignore_env_names
-            )
-            if should_be_yielded:
-                yield key[4:].lower(), val
-
-    # XXX: This is patched in the tests.
-    def _iter_config_files(self):
-        # type: () -> Iterable[Tuple[Kind, List[str]]]
-        """Yields variant and configuration files associated with it.
-
-        This should be treated like items of a dictionary.
-        """
-        # SMELL: Move the conditions out of this function
-
-        # environment variables have the lowest priority
-        config_file = os.environ.get('PIP_CONFIG_FILE', None)
-        if config_file is not None:
-            yield kinds.ENV, [config_file]
-        else:
-            yield kinds.ENV, []
-
-        config_files = get_configuration_files()
-
-        # at the base we have any global configuration
-        yield kinds.GLOBAL, config_files[kinds.GLOBAL]
-
-        # per-user configuration next
-        should_load_user_config = not self.isolated and not (
-            config_file and os.path.exists(config_file)
-        )
-        if should_load_user_config:
-            # The legacy config file is overridden by the new config file
-            yield kinds.USER, config_files[kinds.USER]
-
-        # finally, the site (virtualenv) configuration trumps the others
-        yield kinds.SITE, config_files[kinds.SITE]
-
-    def _get_parser_to_modify(self):
-        # type: () -> Tuple[str, RawConfigParser]
-        # Determine which parser to modify
-        parsers = self._parsers[self.load_only]
-        if not parsers:
-            # This should not happen if everything works correctly.
-            raise ConfigurationError(
-                "Fatal Internal error [id=2]. Please report as a bug."
-            )
-
-        # Use the highest priority parser.
-        return parsers[-1]
-
-    # XXX: This is patched in the tests.
-    def _mark_as_modified(self, fname, parser):
-        # type: (str, RawConfigParser) -> None
-        file_parser_tuple = (fname, parser)
-        if file_parser_tuple not in self._modified_parsers:
-            self._modified_parsers.append(file_parser_tuple)
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/distributions/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/distributions/__init__.py
deleted file mode 100644
index bba02f2..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/distributions/__init__.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from pip._internal.distributions.source.legacy import SourceDistribution
-from pip._internal.distributions.wheel import WheelDistribution
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from pip._internal.distributions.base import AbstractDistribution
-    from pip._internal.req.req_install import InstallRequirement
-
-
-def make_distribution_for_install_requirement(install_req):
-    # type: (InstallRequirement) -> AbstractDistribution
-    """Returns a Distribution for the given InstallRequirement
-    """
-    # Editable requirements will always be source distributions. They use the
-    # legacy logic until we create a modern standard for them.
-    if install_req.editable:
-        return SourceDistribution(install_req)
-
-    # If it's a wheel, it's a WheelDistribution
-    if install_req.is_wheel:
-        return WheelDistribution(install_req)
-
-    # Otherwise, a SourceDistribution
-    return SourceDistribution(install_req)
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/distributions/base.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/distributions/base.py
deleted file mode 100644
index 929bbef..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/distributions/base.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-import abc
-
-from pip._vendor.six import add_metaclass
-
-
-@add_metaclass(abc.ABCMeta)
-class AbstractDistribution(object):
-    """A base class for handling installable artifacts.
-
-    The requirements for anything installable are as follows:
-
-     - we must be able to determine the requirement name
-       (or we can't correctly handle the non-upgrade case).
-
-     - for packages with setup requirements, we must also be able
-       to determine their requirements without installing additional
-       packages (for the same reason as run-time dependencies)
-
-     - we must be able to create a Distribution object exposing the
-       above metadata.
-    """
-
-    def __init__(self, req):
-        super(AbstractDistribution, self).__init__()
-        self.req = req
-
-    @abc.abstractmethod
-    def get_pkg_resources_distribution(self):
-        raise NotImplementedError()
-
-    @abc.abstractmethod
-    def prepare_distribution_metadata(self, finder, build_isolation):
-        raise NotImplementedError()
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/distributions/installed.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/distributions/installed.py
deleted file mode 100644
index 454fb48..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/distributions/installed.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from pip._internal.distributions.base import AbstractDistribution
-
-
-class InstalledDistribution(AbstractDistribution):
-    """Represents an installed package.
-
-    This does not need any preparation as the required information has already
-    been computed.
-    """
-
-    def get_pkg_resources_distribution(self):
-        return self.req.satisfied_by
-
-    def prepare_distribution_metadata(self, finder, build_isolation):
-        pass
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/distributions/source/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/distributions/source/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/distributions/source/legacy.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/distributions/source/legacy.py
deleted file mode 100644
index ab43afb..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/distributions/source/legacy.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-import logging
-
-from pip._internal.build_env import BuildEnvironment
-from pip._internal.distributions.base import AbstractDistribution
-from pip._internal.exceptions import InstallationError
-from pip._internal.utils.subprocess import runner_with_spinner_message
-
-logger = logging.getLogger(__name__)
-
-
-class SourceDistribution(AbstractDistribution):
-    """Represents a source distribution.
-
-    The preparation step for these needs metadata for the packages to be
-    generated, either using PEP 517 or using the legacy `setup.py egg_info`.
-
-    NOTE from @pradyunsg (14 June 2019)
-    I expect SourceDistribution class will need to be split into
-    `legacy_source` (setup.py based) and `source` (PEP 517 based) when we start
-    bringing logic for preparation out of InstallRequirement into this class.
-    """
-
-    def get_pkg_resources_distribution(self):
-        return self.req.get_dist()
-
-    def prepare_distribution_metadata(self, finder, build_isolation):
-        # Prepare for building. We need to:
-        #   1. Load pyproject.toml (if it exists)
-        #   2. Set up the build environment
-
-        self.req.load_pyproject_toml()
-        should_isolate = self.req.use_pep517 and build_isolation
-        if should_isolate:
-            self._setup_isolation(finder)
-
-        self.req.prepare_metadata()
-        self.req.assert_source_matches_version()
-
-    def _setup_isolation(self, finder):
-        def _raise_conflicts(conflicting_with, conflicting_reqs):
-            format_string = (
-                "Some build dependencies for {requirement} "
-                "conflict with {conflicting_with}: {description}."
-            )
-            error_message = format_string.format(
-                requirement=self.req,
-                conflicting_with=conflicting_with,
-                description=', '.join(
-                    '%s is incompatible with %s' % (installed, wanted)
-                    for installed, wanted in sorted(conflicting)
-                )
-            )
-            raise InstallationError(error_message)
-
-        # Isolate in a BuildEnvironment and install the build-time
-        # requirements.
-        self.req.build_env = BuildEnvironment()
-        self.req.build_env.install_requirements(
-            finder, self.req.pyproject_requires, 'overlay',
-            "Installing build dependencies"
-        )
-        conflicting, missing = self.req.build_env.check_requirements(
-            self.req.requirements_to_check
-        )
-        if conflicting:
-            _raise_conflicts("PEP 517/518 supported requirements",
-                             conflicting)
-        if missing:
-            logger.warning(
-                "Missing build requirements in pyproject.toml for %s.",
-                self.req,
-            )
-            logger.warning(
-                "The project does not specify a build backend, and "
-                "pip cannot fall back to setuptools without %s.",
-                " and ".join(map(repr, sorted(missing)))
-            )
-        # Install any extra build dependencies that the backend requests.
-        # This must be done in a second pass, as the pyproject.toml
-        # dependencies must be installed before we can call the backend.
-        with self.req.build_env:
-            runner = runner_with_spinner_message(
-                "Getting requirements to build wheel"
-            )
-            backend = self.req.pep517_backend
-            with backend.subprocess_runner(runner):
-                reqs = backend.get_requires_for_build_wheel()
-
-        conflicting, missing = self.req.build_env.check_requirements(reqs)
-        if conflicting:
-            _raise_conflicts("the backend dependencies", conflicting)
-        self.req.build_env.install_requirements(
-            finder, missing, 'normal',
-            "Installing backend dependencies"
-        )
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/distributions/wheel.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/distributions/wheel.py
deleted file mode 100644
index 128951f..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/distributions/wheel.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from pip._vendor import pkg_resources
-
-from pip._internal.distributions.base import AbstractDistribution
-
-
-class WheelDistribution(AbstractDistribution):
-    """Represents a wheel distribution.
-
-    This does not need any preparation as wheels can be directly unpacked.
-    """
-
-    def get_pkg_resources_distribution(self):
-        return list(pkg_resources.find_distributions(
-                    self.req.source_dir))[0]
-
-    def prepare_distribution_metadata(self, finder, build_isolation):
-        pass
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/download.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/download.py
deleted file mode 100644
index 6567fc3..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/download.py
+++ /dev/null
@@ -1,578 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import cgi
-import logging
-import mimetypes
-import os
-import re
-import shutil
-import sys
-
-from pip._vendor import requests
-from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
-from pip._vendor.six import PY2
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-from pip._internal.exceptions import HashMismatch, InstallationError
-from pip._internal.models.index import PyPI
-from pip._internal.network.session import PipSession
-from pip._internal.utils.encoding import auto_decode
-from pip._internal.utils.filesystem import copy2_fixed
-from pip._internal.utils.misc import (
-    ask_path_exists,
-    backup_dir,
-    consume,
-    display_path,
-    format_size,
-    hide_url,
-    path_to_display,
-    rmtree,
-    splitext,
-)
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.ui import DownloadProgressProvider
-from pip._internal.utils.unpacking import unpack_file
-from pip._internal.utils.urls import get_url_scheme
-from pip._internal.vcs import vcs
-
-if MYPY_CHECK_RUNNING:
-    from typing import (
-        IO, Callable, List, Optional, Text, Tuple,
-    )
-
-    from mypy_extensions import TypedDict
-
-    from pip._internal.models.link import Link
-    from pip._internal.utils.hashes import Hashes
-    from pip._internal.vcs.versioncontrol import VersionControl
-
-    if PY2:
-        CopytreeKwargs = TypedDict(
-            'CopytreeKwargs',
-            {
-                'ignore': Callable[[str, List[str]], List[str]],
-                'symlinks': bool,
-            },
-            total=False,
-        )
-    else:
-        CopytreeKwargs = TypedDict(
-            'CopytreeKwargs',
-            {
-                'copy_function': Callable[[str, str], None],
-                'ignore': Callable[[str, List[str]], List[str]],
-                'ignore_dangling_symlinks': bool,
-                'symlinks': bool,
-            },
-            total=False,
-        )
-
-
-__all__ = ['get_file_content',
-           'unpack_vcs_link',
-           'unpack_file_url',
-           'unpack_http_url', 'unpack_url',
-           'parse_content_disposition', 'sanitize_content_filename']
-
-
-logger = logging.getLogger(__name__)
-
-
-def get_file_content(url, comes_from=None, session=None):
-    # type: (str, Optional[str], Optional[PipSession]) -> Tuple[str, Text]
-    """Gets the content of a file; it may be a filename, file: URL, or
-    http: URL.  Returns (location, content).  Content is unicode.
-
-    :param url:         File path or url.
-    :param comes_from:  Origin description of requirements.
-    :param session:     Instance of pip.download.PipSession.
-    """
-    if session is None:
-        raise TypeError(
-            "get_file_content() missing 1 required keyword argument: 'session'"
-        )
-
-    scheme = get_url_scheme(url)
-
-    if scheme in ['http', 'https']:
-        # FIXME: catch some errors
-        resp = session.get(url)
-        resp.raise_for_status()
-        return resp.url, resp.text
-
-    elif scheme == 'file':
-        if comes_from and comes_from.startswith('http'):
-            raise InstallationError(
-                'Requirements file %s references URL %s, which is local'
-                % (comes_from, url))
-
-        path = url.split(':', 1)[1]
-        path = path.replace('\\', '/')
-        match = _url_slash_drive_re.match(path)
-        if match:
-            path = match.group(1) + ':' + path.split('|', 1)[1]
-        path = urllib_parse.unquote(path)
-        if path.startswith('/'):
-            path = '/' + path.lstrip('/')
-        url = path
-
-    try:
-        with open(url, 'rb') as f:
-            content = auto_decode(f.read())
-    except IOError as exc:
-        raise InstallationError(
-            'Could not open requirements file: %s' % str(exc)
-        )
-    return url, content
-
-
-_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
-
-
-def unpack_vcs_link(link, location):
-    # type: (Link, str) -> None
-    vcs_backend = _get_used_vcs_backend(link)
-    assert vcs_backend is not None
-    vcs_backend.unpack(location, url=hide_url(link.url))
-
-
-def _get_used_vcs_backend(link):
-    # type: (Link) -> Optional[VersionControl]
-    """
-    Return a VersionControl object or None.
-    """
-    for vcs_backend in vcs.backends:
-        if link.scheme in vcs_backend.schemes:
-            return vcs_backend
-    return None
-
-
-def _progress_indicator(iterable, *args, **kwargs):
-    return iterable
-
-
-def _download_url(
-    resp,  # type: Response
-    link,  # type: Link
-    content_file,  # type: IO
-    hashes,  # type: Optional[Hashes]
-    progress_bar  # type: str
-):
-    # type: (...) -> None
-    try:
-        total_length = int(resp.headers['content-length'])
-    except (ValueError, KeyError, TypeError):
-        total_length = 0
-
-    cached_resp = getattr(resp, "from_cache", False)
-    if logger.getEffectiveLevel() > logging.INFO:
-        show_progress = False
-    elif cached_resp:
-        show_progress = False
-    elif total_length > (40 * 1000):
-        show_progress = True
-    elif not total_length:
-        show_progress = True
-    else:
-        show_progress = False
-
-    show_url = link.show_url
-
-    def resp_read(chunk_size):
-        try:
-            # Special case for urllib3.
-            for chunk in resp.raw.stream(
-                    chunk_size,
-                    # We use decode_content=False here because we don't
-                    # want urllib3 to mess with the raw bytes we get
-                    # from the server. If we decompress inside of
-                    # urllib3 then we cannot verify the checksum
-                    # because the checksum will be of the compressed
-                    # file. This breakage will only occur if the
-                    # server adds a Content-Encoding header, which
-                    # depends on how the server was configured:
-                    # - Some servers will notice that the file isn't a
-                    #   compressible file and will leave the file alone
-                    #   and with an empty Content-Encoding
-                    # - Some servers will notice that the file is
-                    #   already compressed and will leave the file
-                    #   alone and will add a Content-Encoding: gzip
-                    #   header
-                    # - Some servers won't notice anything at all and
-                    #   will take a file that's already been compressed
-                    #   and compress it again and set the
-                    #   Content-Encoding: gzip header
-                    #
-                    # By setting this not to decode automatically we
-                    # hope to eliminate problems with the second case.
-                    decode_content=False):
-                yield chunk
-        except AttributeError:
-            # Standard file-like object.
-            while True:
-                chunk = resp.raw.read(chunk_size)
-                if not chunk:
-                    break
-                yield chunk
-
-    def written_chunks(chunks):
-        for chunk in chunks:
-            content_file.write(chunk)
-            yield chunk
-
-    progress_indicator = _progress_indicator
-
-    if link.netloc == PyPI.netloc:
-        url = show_url
-    else:
-        url = link.url_without_fragment
-
-    if show_progress:  # We don't show progress on cached responses
-        progress_indicator = DownloadProgressProvider(progress_bar,
-                                                      max=total_length)
-        if total_length:
-            logger.info("Downloading %s (%s)", url, format_size(total_length))
-        else:
-            logger.info("Downloading %s", url)
-    elif cached_resp:
-        logger.info("Using cached %s", url)
-    else:
-        logger.info("Downloading %s", url)
-
-    downloaded_chunks = written_chunks(
-        progress_indicator(
-            resp_read(CONTENT_CHUNK_SIZE),
-            CONTENT_CHUNK_SIZE
-        )
-    )
-    if hashes:
-        hashes.check_against_chunks(downloaded_chunks)
-    else:
-        consume(downloaded_chunks)
-
-
-def _copy_file(filename, location, link):
-    copy = True
-    download_location = os.path.join(location, link.filename)
-    if os.path.exists(download_location):
-        response = ask_path_exists(
-            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort' %
-            display_path(download_location), ('i', 'w', 'b', 'a'))
-        if response == 'i':
-            copy = False
-        elif response == 'w':
-            logger.warning('Deleting %s', display_path(download_location))
-            os.remove(download_location)
-        elif response == 'b':
-            dest_file = backup_dir(download_location)
-            logger.warning(
-                'Backing up %s to %s',
-                display_path(download_location),
-                display_path(dest_file),
-            )
-            shutil.move(download_location, dest_file)
-        elif response == 'a':
-            sys.exit(-1)
-    if copy:
-        shutil.copy(filename, download_location)
-        logger.info('Saved %s', display_path(download_location))
-
-
-def unpack_http_url(
-    link,  # type: Link
-    location,  # type: str
-    download_dir=None,  # type: Optional[str]
-    session=None,  # type: Optional[PipSession]
-    hashes=None,  # type: Optional[Hashes]
-    progress_bar="on"  # type: str
-):
-    # type: (...) -> None
-    if session is None:
-        raise TypeError(
-            "unpack_http_url() missing 1 required keyword argument: 'session'"
-        )
-
-    with TempDirectory(kind="unpack") as temp_dir:
-        # If a download dir is specified, is the file already downloaded there?
-        already_downloaded_path = None
-        if download_dir:
-            already_downloaded_path = _check_download_dir(link,
-                                                          download_dir,
-                                                          hashes)
-
-        if already_downloaded_path:
-            from_path = already_downloaded_path
-            content_type = mimetypes.guess_type(from_path)[0]
-        else:
-            # let's download to a tmp dir
-            from_path, content_type = _download_http_url(link,
-                                                         session,
-                                                         temp_dir.path,
-                                                         hashes,
-                                                         progress_bar)
-
-        # unpack the archive to the build dir location. even when only
-        # downloading archives, they have to be unpacked to parse dependencies
-        unpack_file(from_path, location, content_type)
-
-        # a download dir is specified; let's copy the archive there
-        if download_dir and not already_downloaded_path:
-            _copy_file(from_path, download_dir, link)
-
-        if not already_downloaded_path:
-            os.unlink(from_path)
-
-
-def _copy2_ignoring_special_files(src, dest):
-    # type: (str, str) -> None
-    """Copying special files is not supported, but as a convenience to users
-    we skip errors copying them. This supports tools that may create e.g.
-    socket files in the project source directory.
-    """
-    try:
-        copy2_fixed(src, dest)
-    except shutil.SpecialFileError as e:
-        # SpecialFileError may be raised due to either the source or
-        # destination. If the destination was the cause then we would actually
-        # care, but since the destination directory is deleted prior to
-        # copy we ignore all of them assuming it is caused by the source.
-        logger.warning(
-            "Ignoring special file error '%s' encountered copying %s to %s.",
-            str(e),
-            path_to_display(src),
-            path_to_display(dest),
-        )
-
-
-def _copy_source_tree(source, target):
-    # type: (str, str) -> None
-    def ignore(d, names):
-        # Pulling in those directories can potentially be very slow,
-        # exclude the following directories if they appear in the top
-        # level dir (and only it).
-        # See discussion at https://github.com/pypa/pip/pull/6770
-        return ['.tox', '.nox'] if d == source else []
-
-    kwargs = dict(ignore=ignore, symlinks=True)  # type: CopytreeKwargs
-
-    if not PY2:
-        # Python 2 does not support copy_function, so we only ignore
-        # errors on special file copy in Python 3.
-        kwargs['copy_function'] = _copy2_ignoring_special_files
-
-    shutil.copytree(source, target, **kwargs)
-
-
-def unpack_file_url(
-    link,  # type: Link
-    location,  # type: str
-    download_dir=None,  # type: Optional[str]
-    hashes=None  # type: Optional[Hashes]
-):
-    # type: (...) -> None
-    """Unpack link into location.
-
-    If download_dir is provided and link points to a file, make a copy
-    of the link file inside download_dir.
-    """
-    link_path = link.file_path
-    # If it's a url to a local directory
-    if link.is_existing_dir():
-        if os.path.isdir(location):
-            rmtree(location)
-        _copy_source_tree(link_path, location)
-        if download_dir:
-            logger.info('Link is a directory, ignoring download_dir')
-        return
-
-    # If --require-hashes is off, `hashes` is either empty, the
-    # link's embedded hash, or MissingHashes; it is required to
-    # match. If --require-hashes is on, we are satisfied by any
-    # hash in `hashes` matching: a URL-based or an option-based
-    # one; no internet-sourced hash will be in `hashes`.
-    if hashes:
-        hashes.check_against_path(link_path)
-
-    # If a download dir is specified, is the file already there and valid?
-    already_downloaded_path = None
-    if download_dir:
-        already_downloaded_path = _check_download_dir(link,
-                                                      download_dir,
-                                                      hashes)
-
-    if already_downloaded_path:
-        from_path = already_downloaded_path
-    else:
-        from_path = link_path
-
-    content_type = mimetypes.guess_type(from_path)[0]
-
-    # unpack the archive to the build dir location. even when only downloading
-    # archives, they have to be unpacked to parse dependencies
-    unpack_file(from_path, location, content_type)
-
-    # a download dir is specified and not already downloaded
-    if download_dir and not already_downloaded_path:
-        _copy_file(from_path, download_dir, link)
-
-
-def unpack_url(
-    link,  # type: Link
-    location,  # type: str
-    download_dir=None,  # type: Optional[str]
-    session=None,  # type: Optional[PipSession]
-    hashes=None,  # type: Optional[Hashes]
-    progress_bar="on"  # type: str
-):
-    # type: (...) -> None
-    """Unpack link.
-       If link is a VCS link:
-         if only_download, export into download_dir and ignore location
-         else unpack into location
-       for other types of link:
-         - unpack into location
-         - if download_dir, copy the file into download_dir
-         - if only_download, mark location for deletion
-
-    :param hashes: A Hashes object, one of whose embedded hashes must match,
-        or HashMismatch will be raised. If the Hashes is empty, no matches are
-        required, and unhashable types of requirements (like VCS ones, which
-        would ordinarily raise HashUnsupported) are allowed.
-    """
-    # non-editable vcs urls
-    if link.is_vcs:
-        unpack_vcs_link(link, location)
-
-    # file urls
-    elif link.is_file:
-        unpack_file_url(link, location, download_dir, hashes=hashes)
-
-    # http urls
-    else:
-        if session is None:
-            session = PipSession()
-
-        unpack_http_url(
-            link,
-            location,
-            download_dir,
-            session,
-            hashes=hashes,
-            progress_bar=progress_bar
-        )
-
-
-def sanitize_content_filename(filename):
-    # type: (str) -> str
-    """
-    Sanitize the "filename" value from a Content-Disposition header.
-    """
-    return os.path.basename(filename)
-
-
-def parse_content_disposition(content_disposition, default_filename):
-    # type: (str, str) -> str
-    """
-    Parse the "filename" value from a Content-Disposition header, and
-    return the default filename if the result is empty.
-    """
-    _type, params = cgi.parse_header(content_disposition)
-    filename = params.get('filename')
-    if filename:
-        # We need to sanitize the filename to prevent directory traversal
-        # in case the filename contains ".." path parts.
-        filename = sanitize_content_filename(filename)
-    return filename or default_filename
-
-
-def _download_http_url(
-    link,  # type: Link
-    session,  # type: PipSession
-    temp_dir,  # type: str
-    hashes,  # type: Optional[Hashes]
-    progress_bar  # type: str
-):
-    # type: (...) -> Tuple[str, str]
-    """Download link url into temp_dir using provided session"""
-    target_url = link.url.split('#', 1)[0]
-    try:
-        resp = session.get(
-            target_url,
-            # We use Accept-Encoding: identity here because requests
-            # defaults to accepting compressed responses. This breaks in
-            # a variety of ways depending on how the server is configured.
-            # - Some servers will notice that the file isn't a compressible
-            #   file and will leave the file alone and with an empty
-            #   Content-Encoding
-            # - Some servers will notice that the file is already
-            #   compressed and will leave the file alone and will add a
-            #   Content-Encoding: gzip header
-            # - Some servers won't notice anything at all and will take
-            #   a file that's already been compressed and compress it again
-            #   and set the Content-Encoding: gzip header
-            # By setting this to request only the identity encoding, we're
-            # hoping to eliminate the third case. Hopefully there does not
-            # exist a server which when given a file will notice it is
-            # already compressed and that you're not asking for a
-            # compressed file and will then decompress it before sending
-            # because if that's the case I don't think it'll ever be
-            # possible to make this work.
-            headers={"Accept-Encoding": "identity"},
-            stream=True,
-        )
-        resp.raise_for_status()
-    except requests.HTTPError as exc:
-        logger.critical(
-            "HTTP error %s while getting %s", exc.response.status_code, link,
-        )
-        raise
-
-    content_type = resp.headers.get('content-type', '')
-    filename = link.filename  # fallback
-    # Have a look at the Content-Disposition header for a better guess
-    content_disposition = resp.headers.get('content-disposition')
-    if content_disposition:
-        filename = parse_content_disposition(content_disposition, filename)
-    ext = splitext(filename)[1]  # type: Optional[str]
-    if not ext:
-        ext = mimetypes.guess_extension(content_type)
-        if ext:
-            filename += ext
-    if not ext and link.url != resp.url:
-        ext = os.path.splitext(resp.url)[1]
-        if ext:
-            filename += ext
-    file_path = os.path.join(temp_dir, filename)
-    with open(file_path, 'wb') as content_file:
-        _download_url(resp, link, content_file, hashes, progress_bar)
-    return file_path, content_type
-
-
-def _check_download_dir(link, download_dir, hashes):
-    # type: (Link, str, Optional[Hashes]) -> Optional[str]
-    """ Check download_dir for a previously downloaded file with a correct hash.
-        If a correct file is found, return its path; otherwise return None.
-    """
-    download_path = os.path.join(download_dir, link.filename)
-
-    if not os.path.exists(download_path):
-        return None
-
-    # If already downloaded, does its hash match?
-    logger.info('File was already downloaded %s', download_path)
-    if hashes:
-        try:
-            hashes.check_against_path(download_path)
-        except HashMismatch:
-            logger.warning(
-                'Previously-downloaded file %s has bad hash. '
-                'Re-downloading.',
-                download_path
-            )
-            os.unlink(download_path)
-            return None
-    return download_path
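One detail worth calling out from the download helpers above: the filename taken from a Content-Disposition header is passed through os.path.basename before use, so a malicious server cannot smuggle path separators into the save location. A minimal standalone illustration of that guard, using cgi.parse_header as the Python 3.7-era module above does (cgi is deprecated on newer Pythons); the header value here is made up.

# Minimal illustration of the Content-Disposition traversal guard used above.
import cgi
import os

def filename_from_content_disposition(header_value, default_filename):
    _type, params = cgi.parse_header(header_value)
    filename = params.get('filename')
    if filename:
        # Strip any directory components such as "../" before using the name.
        filename = os.path.basename(filename)
    return filename or default_filename

# Prints "passwd": the path components from the hostile header are discarded.
print(filename_from_content_disposition(
    'attachment; filename="../../etc/passwd"', 'pkg-1.0.tar.gz'
))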
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/exceptions.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/exceptions.py
deleted file mode 100644
index dddec78..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/exceptions.py
+++ /dev/null
@@ -1,308 +0,0 @@
-"""Exceptions used throughout package"""
-
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-from itertools import chain, groupby, repeat
-
-from pip._vendor.six import iteritems
-
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import Optional
-    from pip._vendor.pkg_resources import Distribution
-    from pip._internal.req.req_install import InstallRequirement
-
-
-class PipError(Exception):
-    """Base pip exception"""
-
-
-class ConfigurationError(PipError):
-    """General exception in configuration"""
-
-
-class InstallationError(PipError):
-    """General exception during installation"""
-
-
-class UninstallationError(PipError):
-    """General exception during uninstallation"""
-
-
-class NoneMetadataError(PipError):
-    """
-    Raised when accessing "METADATA" or "PKG-INFO" metadata for a
-    pip._vendor.pkg_resources.Distribution object and
-    `dist.has_metadata('METADATA')` returns True but
-    `dist.get_metadata('METADATA')` returns None (and similarly for
-    "PKG-INFO").
-    """
-
-    def __init__(self, dist, metadata_name):
-        # type: (Distribution, str) -> None
-        """
-        :param dist: A Distribution object.
-        :param metadata_name: The name of the metadata being accessed
-            (can be "METADATA" or "PKG-INFO").
-        """
-        self.dist = dist
-        self.metadata_name = metadata_name
-
-    def __str__(self):
-        # type: () -> str
-        # Use `dist` in the error message because its stringification
-        # includes more information, like the version and location.
-        return (
-            'None {} metadata found for distribution: {}'.format(
-                self.metadata_name, self.dist,
-            )
-        )
-
-
-class DistributionNotFound(InstallationError):
-    """Raised when a distribution cannot be found to satisfy a requirement"""
-
-
-class RequirementsFileParseError(InstallationError):
-    """Raised when a general error occurs parsing a requirements file line."""
-
-
-class BestVersionAlreadyInstalled(PipError):
-    """Raised when the most up-to-date version of a package is already
-    installed."""
-
-
-class BadCommand(PipError):
-    """Raised when virtualenv or a command is not found"""
-
-
-class CommandError(PipError):
-    """Raised when there is an error in command-line arguments"""
-
-
-class PreviousBuildDirError(PipError):
-    """Raised when there's a previous conflicting build directory"""
-
-
-class InvalidWheelFilename(InstallationError):
-    """Invalid wheel filename."""
-
-
-class UnsupportedWheel(InstallationError):
-    """Unsupported wheel."""
-
-
-class HashErrors(InstallationError):
-    """Multiple HashError instances rolled into one for reporting"""
-
-    def __init__(self):
-        self.errors = []
-
-    def append(self, error):
-        self.errors.append(error)
-
-    def __str__(self):
-        lines = []
-        self.errors.sort(key=lambda e: e.order)
-        for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
-            lines.append(cls.head)
-            lines.extend(e.body() for e in errors_of_cls)
-        if lines:
-            return '\n'.join(lines)
-        return ''
-
-    def __nonzero__(self):
-        return bool(self.errors)
-
-    def __bool__(self):
-        return self.__nonzero__()
-
-
-class HashError(InstallationError):
-    """
-    A failure to verify a package against known-good hashes
-
-    :cvar order: An int sorting hash exception classes by difficulty of
-        recovery (lower being harder), so the user doesn't bother fretting
-        about unpinned packages when he has deeper issues, like VCS
-        dependencies, to deal with. Also keeps error reports in a
-        deterministic order.
-    :cvar head: A section heading for display above potentially many
-        exceptions of this kind
-    :ivar req: The InstallRequirement that triggered this error. This is
-        pasted on after the exception is instantiated, because it's not
-        typically available earlier.
-
-    """
-    req = None  # type: Optional[InstallRequirement]
-    head = ''
-
-    def body(self):
-        """Return a summary of me for display under the heading.
-
-        This default implementation simply prints a description of the
-        triggering requirement.
-
-        :param req: The InstallRequirement that provoked this error, with
-            populate_link() having already been called
-
-        """
-        return '    %s' % self._requirement_name()
-
-    def __str__(self):
-        return '%s\n%s' % (self.head, self.body())
-
-    def _requirement_name(self):
-        """Return a description of the requirement that triggered me.
-
-        This default implementation returns the long description of the req,
-        with line numbers.
-
-        """
-        return str(self.req) if self.req else 'unknown package'
-
-
-class VcsHashUnsupported(HashError):
-    """A hash was provided for a version-control-system-based requirement, but
-    we don't have a method for hashing those."""
-
-    order = 0
-    head = ("Can't verify hashes for these requirements because we don't "
-            "have a way to hash version control repositories:")
-
-
-class DirectoryUrlHashUnsupported(HashError):
-    """A hash was provided for a version-control-system-based requirement, but
-    we don't have a method for hashing those."""
-
-    order = 1
-    head = ("Can't verify hashes for these file:// requirements because they "
-            "point to directories:")
-
-
-class HashMissing(HashError):
-    """A hash was needed for a requirement but is absent."""
-
-    order = 2
-    head = ('Hashes are required in --require-hashes mode, but they are '
-            'missing from some requirements. Here is a list of those '
-            'requirements along with the hashes their downloaded archives '
-            'actually had. Add lines like these to your requirements files to '
-            'prevent tampering. (If you did not enable --require-hashes '
-            'manually, note that it turns on automatically when any package '
-            'has a hash.)')
-
-    def __init__(self, gotten_hash):
-        """
-        :param gotten_hash: The hash of the (possibly malicious) archive we
-            just downloaded
-        """
-        self.gotten_hash = gotten_hash
-
-    def body(self):
-        # Dodge circular import.
-        from pip._internal.utils.hashes import FAVORITE_HASH
-
-        package = None
-        if self.req:
-            # In the case of URL-based requirements, display the original URL
-            # seen in the requirements file rather than the package name,
-            # so the output can be directly copied into the requirements file.
-            package = (self.req.original_link if self.req.original_link
-                       # In case someone feeds something downright stupid
-                       # to InstallRequirement's constructor.
-                       else getattr(self.req, 'req', None))
-        return '    %s --hash=%s:%s' % (package or 'unknown package',
-                                        FAVORITE_HASH,
-                                        self.gotten_hash)
-
-
-class HashUnpinned(HashError):
-    """A requirement had a hash specified but was not pinned to a specific
-    version."""
-
-    order = 3
-    head = ('In --require-hashes mode, all requirements must have their '
-            'versions pinned with ==. These do not:')
-
-
-class HashMismatch(HashError):
-    """
-    Distribution file hash values don't match.
-
-    :ivar package_name: The name of the package that triggered the hash
-        mismatch. Feel free to write to this after the exception is raised to
-        improve its error message.
-
-    """
-    order = 4
-    head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
-            'FILE. If you have updated the package versions, please update '
-            'the hashes. Otherwise, examine the package contents carefully; '
-            'someone may have tampered with them.')
-
-    def __init__(self, allowed, gots):
-        """
-        :param allowed: A dict of algorithm names pointing to lists of allowed
-            hex digests
-        :param gots: A dict of algorithm names pointing to hashes we
-            actually got from the files under suspicion
-        """
-        self.allowed = allowed
-        self.gots = gots
-
-    def body(self):
-        return '    %s:\n%s' % (self._requirement_name(),
-                                self._hash_comparison())
-
-    def _hash_comparison(self):
-        """
-        Return a comparison of actual and expected hash values.
-
-        Example::
-
-               Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
-                            or 123451234512345123451234512345123451234512345
-                    Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
-
-        """
-        def hash_then_or(hash_name):
-            # For now, all the decent hashes have 6-char names, so we can get
-            # away with hard-coding space literals.
-            return chain([hash_name], repeat('    or'))
-
-        lines = []
-        for hash_name, expecteds in iteritems(self.allowed):
-            prefix = hash_then_or(hash_name)
-            lines.extend(('        Expected %s %s' % (next(prefix), e))
-                         for e in expecteds)
-            lines.append('             Got        %s\n' %
-                         self.gots[hash_name].hexdigest())
-        return '\n'.join(lines)
-
-
-class UnsupportedPythonVersion(InstallationError):
-    """Unsupported python version according to Requires-Python package
-    metadata."""
-
-
-class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
-    """When there are errors while loading a configuration file
-    """
-
-    def __init__(self, reason="could not be loaded", fname=None, error=None):
-        super(ConfigurationFileCouldNotBeLoaded, self).__init__(error)
-        self.reason = reason
-        self.fname = fname
-        self.error = error
-
-    def __str__(self):
-        if self.fname is not None:
-            message_part = " in {}.".format(self.fname)
-        else:
-            assert self.error is not None
-            message_part = ".\n{}\n".format(self.error.message)
-        return "Configuration file {}{}".format(self.reason, message_part)
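For context on the hash-related exceptions above: what ultimately feeds them is a chunked digest of the downloaded archive compared against the digests pinned in the requirements file. A small self-contained sketch of that comparison with plain hashlib follows; it is not pip's internal Hashes helper, and the function name and error type are illustrative.

# Standalone sketch of the chunked hash check behind HashMismatch-style errors;
# uses only hashlib rather than pip's internal Hashes class.
import hashlib

def check_sha256(path, allowed_hex_digests, chunk_size=8192):
    """Hash `path` in chunks and compare against a set of allowed digests."""
    digest = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            digest.update(chunk)
    got = digest.hexdigest()
    if got not in allowed_hex_digests:
        raise ValueError(
            'Expected sha256 in %r, got %s' % (sorted(allowed_hex_digests), got)
        )
    return got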
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/index.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/index.py
deleted file mode 100644
index 897444a..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/index.py
+++ /dev/null
@@ -1,992 +0,0 @@
-"""Routines related to PyPI, indexes"""
-
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import logging
-import re
-
-from pip._vendor.packaging import specifiers
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.packaging.version import parse as parse_version
-
-from pip._internal.exceptions import (
-    BestVersionAlreadyInstalled,
-    DistributionNotFound,
-    InvalidWheelFilename,
-    UnsupportedWheel,
-)
-from pip._internal.models.candidate import InstallationCandidate
-from pip._internal.models.format_control import FormatControl
-from pip._internal.models.link import Link
-from pip._internal.models.selection_prefs import SelectionPreferences
-from pip._internal.models.target_python import TargetPython
-from pip._internal.utils.filetypes import WHEEL_EXTENSION
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import build_netloc
-from pip._internal.utils.packaging import check_requires_python
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS
-from pip._internal.utils.urls import url_to_path
-from pip._internal.wheel import Wheel
-
-if MYPY_CHECK_RUNNING:
-    from typing import (
-        FrozenSet, Iterable, List, Optional, Set, Text, Tuple, Union,
-    )
-    from pip._vendor.packaging.version import _BaseVersion
-    from pip._internal.collector import LinkCollector
-    from pip._internal.models.search_scope import SearchScope
-    from pip._internal.req import InstallRequirement
-    from pip._internal.pep425tags import Pep425Tag
-    from pip._internal.utils.hashes import Hashes
-
-    BuildTag = Union[Tuple[()], Tuple[int, str]]
-    CandidateSortingKey = (
-        Tuple[int, int, int, _BaseVersion, BuildTag, Optional[int]]
-    )
-
-
-__all__ = ['FormatControl', 'BestCandidateResult', 'PackageFinder']
-
-
-logger = logging.getLogger(__name__)
-
-
-def _check_link_requires_python(
-    link,  # type: Link
-    version_info,  # type: Tuple[int, int, int]
-    ignore_requires_python=False,  # type: bool
-):
-    # type: (...) -> bool
-    """
-    Return whether the given Python version is compatible with a link's
-    "Requires-Python" value.
-
-    :param version_info: A 3-tuple of ints representing the Python
-        major-minor-micro version to check.
-    :param ignore_requires_python: Whether to ignore the "Requires-Python"
-        value if the given Python version isn't compatible.
-    """
-    try:
-        is_compatible = check_requires_python(
-            link.requires_python, version_info=version_info,
-        )
-    except specifiers.InvalidSpecifier:
-        logger.debug(
-            "Ignoring invalid Requires-Python (%r) for link: %s",
-            link.requires_python, link,
-        )
-    else:
-        if not is_compatible:
-            version = '.'.join(map(str, version_info))
-            if not ignore_requires_python:
-                logger.debug(
-                    'Link requires a different Python (%s not in: %r): %s',
-                    version, link.requires_python, link,
-                )
-                return False
-
-            logger.debug(
-                'Ignoring failed Requires-Python check (%s not in: %r) '
-                'for link: %s',
-                version, link.requires_python, link,
-            )
-
-    return True
-
-
-class LinkEvaluator(object):
-
-    """
-    Responsible for evaluating links for a particular project.
-    """
-
-    _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
-
-    # Don't include an allow_yanked default value to make sure each call
-    # site considers whether yanked releases are allowed. This also causes
-    # that decision to be made explicit in the calling code, which helps
-    # people when reading the code.
-    def __init__(
-        self,
-        project_name,    # type: str
-        canonical_name,  # type: str
-        formats,         # type: FrozenSet
-        target_python,   # type: TargetPython
-        allow_yanked,    # type: bool
-        ignore_requires_python=None,  # type: Optional[bool]
-    ):
-        # type: (...) -> None
-        """
-        :param project_name: The user supplied package name.
-        :param canonical_name: The canonical package name.
-        :param formats: The formats allowed for this package. Should be a set
-            with 'binary' or 'source' or both in it.
-        :param target_python: The target Python interpreter to use when
-            evaluating link compatibility. This is used, for example, to
-            check wheel compatibility, as well as when checking the Python
-            version, e.g. the Python version embedded in a link filename
-            (or egg fragment) and against an HTML link's optional PEP 503
-            "data-requires-python" attribute.
-        :param allow_yanked: Whether files marked as yanked (in the sense
-            of PEP 592) are permitted to be candidates for install.
-        :param ignore_requires_python: Whether to ignore incompatible
-            PEP 503 "data-requires-python" values in HTML links. Defaults
-            to False.
-        """
-        if ignore_requires_python is None:
-            ignore_requires_python = False
-
-        self._allow_yanked = allow_yanked
-        self._canonical_name = canonical_name
-        self._ignore_requires_python = ignore_requires_python
-        self._formats = formats
-        self._target_python = target_python
-
-        self.project_name = project_name
-
-    def evaluate_link(self, link):
-        # type: (Link) -> Tuple[bool, Optional[Text]]
-        """
-        Determine whether a link is a candidate for installation.
-
-        :return: A tuple (is_candidate, result), where `result` is (1) a
-            version string if `is_candidate` is True, and (2) if
-            `is_candidate` is False, an optional string to log the reason
-            the link fails to qualify.
-        """
-        version = None
-        if link.is_yanked and not self._allow_yanked:
-            reason = link.yanked_reason or ''
-            # Mark this as a unicode string to prevent "UnicodeEncodeError:
-            # 'ascii' codec can't encode character" in Python 2 when
-            # the reason contains non-ascii characters.
-            return (False, u'yanked for reason: {}'.format(reason))
-
-        if link.egg_fragment:
-            egg_info = link.egg_fragment
-            ext = link.ext
-        else:
-            egg_info, ext = link.splitext()
-            if not ext:
-                return (False, 'not a file')
-            if ext not in SUPPORTED_EXTENSIONS:
-                return (False, 'unsupported archive format: %s' % ext)
-            if "binary" not in self._formats and ext == WHEEL_EXTENSION:
-                reason = 'No binaries permitted for %s' % self.project_name
-                return (False, reason)
-            if "macosx10" in link.path and ext == '.zip':
-                return (False, 'macosx10 one')
-            if ext == WHEEL_EXTENSION:
-                try:
-                    wheel = Wheel(link.filename)
-                except InvalidWheelFilename:
-                    return (False, 'invalid wheel filename')
-                if canonicalize_name(wheel.name) != self._canonical_name:
-                    reason = 'wrong project name (not %s)' % self.project_name
-                    return (False, reason)
-
-                supported_tags = self._target_python.get_tags()
-                if not wheel.supported(supported_tags):
-                    # Include the wheel's tags in the reason string to
-                    # simplify troubleshooting compatibility issues.
-                    file_tags = wheel.get_formatted_file_tags()
-                    reason = (
-                        "none of the wheel's tags match: {}".format(
-                            ', '.join(file_tags)
-                        )
-                    )
-                    return (False, reason)
-
-                version = wheel.version
-
-        # This should be up by the self.ok_binary check, but see issue 2700.
-        if "source" not in self._formats and ext != WHEEL_EXTENSION:
-            return (False, 'No sources permitted for %s' % self.project_name)
-
-        if not version:
-            version = _extract_version_from_fragment(
-                egg_info, self._canonical_name,
-            )
-        if not version:
-            return (
-                False, 'Missing project version for %s' % self.project_name,
-            )
-
-        match = self._py_version_re.search(version)
-        if match:
-            version = version[:match.start()]
-            py_version = match.group(1)
-            if py_version != self._target_python.py_version:
-                return (False, 'Python version is incorrect')
-
-        supports_python = _check_link_requires_python(
-            link, version_info=self._target_python.py_version_info,
-            ignore_requires_python=self._ignore_requires_python,
-        )
-        if not supports_python:
-            # Return None for the reason text to suppress calling
-            # _log_skipped_link().
-            return (False, None)
-
-        logger.debug('Found link %s, version: %s', link, version)
-
-        return (True, version)
-
-
-def filter_unallowed_hashes(
-    candidates,    # type: List[InstallationCandidate]
-    hashes,        # type: Hashes
-    project_name,  # type: str
-):
-    # type: (...) -> List[InstallationCandidate]
-    """
-    Filter out candidates whose hashes aren't allowed, and return a new
-    list of candidates.
-
-    If at least one candidate has an allowed hash, then all candidates with
-    either an allowed hash or no hash specified are returned.  Otherwise,
-    the given candidates are returned.
-
-    Including the candidates with no hash specified when there is a match
-    allows a warning to be logged if there is a more preferred candidate
-    with no hash specified.  Returning all candidates in the case of no
-    matches lets pip report the hash of the candidate that would otherwise
-    have been installed (e.g. permitting the user to more easily update
-    their requirements file with the desired hash).
-    """
-    if not hashes:
-        logger.debug(
-            'Given no hashes to check %s links for project %r: '
-            'discarding no candidates',
-            len(candidates),
-            project_name,
-        )
-        # Make sure we're not returning back the given value.
-        return list(candidates)
-
-    matches_or_no_digest = []
-    # Collect the non-matches for logging purposes.
-    non_matches = []
-    match_count = 0
-    for candidate in candidates:
-        link = candidate.link
-        if not link.has_hash:
-            pass
-        elif link.is_hash_allowed(hashes=hashes):
-            match_count += 1
-        else:
-            non_matches.append(candidate)
-            continue
-
-        matches_or_no_digest.append(candidate)
-
-    if match_count:
-        filtered = matches_or_no_digest
-    else:
-        # Make sure we're not returning back the given value.
-        filtered = list(candidates)
-
-    if len(filtered) == len(candidates):
-        discard_message = 'discarding no candidates'
-    else:
-        discard_message = 'discarding {} non-matches:\n  {}'.format(
-            len(non_matches),
-            '\n  '.join(str(candidate.link) for candidate in non_matches)
-        )
-
-    logger.debug(
-        'Checked %s links for project %r against %s hashes '
-        '(%s matches, %s no digest): %s',
-        len(candidates),
-        project_name,
-        hashes.digest_count,
-        match_count,
-        len(matches_or_no_digest) - match_count,
-        discard_message
-    )
-
-    return filtered
-
-
-class CandidatePreferences(object):
-
-    """
-    Encapsulates some of the preferences for filtering and sorting
-    InstallationCandidate objects.
-    """
-
-    def __init__(
-        self,
-        prefer_binary=False,  # type: bool
-        allow_all_prereleases=False,  # type: bool
-    ):
-        # type: (...) -> None
-        """
-        :param allow_all_prereleases: Whether to allow all pre-releases.
-        """
-        self.allow_all_prereleases = allow_all_prereleases
-        self.prefer_binary = prefer_binary
-
-
-class BestCandidateResult(object):
-    """A collection of candidates, returned by `PackageFinder.find_best_candidate`.
-
-    This class is only intended to be instantiated by CandidateEvaluator's
-    `compute_best_candidate()` method.
-    """
-
-    def __init__(
-        self,
-        candidates,             # type: List[InstallationCandidate]
-        applicable_candidates,  # type: List[InstallationCandidate]
-        best_candidate,         # type: Optional[InstallationCandidate]
-    ):
-        # type: (...) -> None
-        """
-        :param candidates: A sequence of all available candidates found.
-        :param applicable_candidates: The applicable candidates.
-        :param best_candidate: The most preferred candidate found, or None
-            if no applicable candidates were found.
-        """
-        assert set(applicable_candidates) <= set(candidates)
-
-        if best_candidate is None:
-            assert not applicable_candidates
-        else:
-            assert best_candidate in applicable_candidates
-
-        self._applicable_candidates = applicable_candidates
-        self._candidates = candidates
-
-        self.best_candidate = best_candidate
-
-    def iter_all(self):
-        # type: () -> Iterable[InstallationCandidate]
-        """Iterate through all candidates.
-        """
-        return iter(self._candidates)
-
-    def iter_applicable(self):
-        # type: () -> Iterable[InstallationCandidate]
-        """Iterate through the applicable candidates.
-        """
-        return iter(self._applicable_candidates)
-
-
-class CandidateEvaluator(object):
-
-    """
-    Responsible for filtering and sorting candidates for installation based
-    on what tags are valid.
-    """
-
-    @classmethod
-    def create(
-        cls,
-        project_name,         # type: str
-        target_python=None,   # type: Optional[TargetPython]
-        prefer_binary=False,  # type: bool
-        allow_all_prereleases=False,  # type: bool
-        specifier=None,       # type: Optional[specifiers.BaseSpecifier]
-        hashes=None,          # type: Optional[Hashes]
-    ):
-        # type: (...) -> CandidateEvaluator
-        """Create a CandidateEvaluator object.
-
-        :param target_python: The target Python interpreter to use when
-            checking compatibility. If None (the default), a TargetPython
-            object will be constructed from the running Python.
-        :param specifier: An optional object implementing `filter`
-            (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
-            versions.
-        :param hashes: An optional collection of allowed hashes.
-        """
-        if target_python is None:
-            target_python = TargetPython()
-        if specifier is None:
-            specifier = specifiers.SpecifierSet()
-
-        supported_tags = target_python.get_tags()
-
-        return cls(
-            project_name=project_name,
-            supported_tags=supported_tags,
-            specifier=specifier,
-            prefer_binary=prefer_binary,
-            allow_all_prereleases=allow_all_prereleases,
-            hashes=hashes,
-        )
-
-    def __init__(
-        self,
-        project_name,         # type: str
-        supported_tags,       # type: List[Pep425Tag]
-        specifier,            # type: specifiers.BaseSpecifier
-        prefer_binary=False,  # type: bool
-        allow_all_prereleases=False,  # type: bool
-        hashes=None,                  # type: Optional[Hashes]
-    ):
-        # type: (...) -> None
-        """
-        :param supported_tags: The PEP 425 tags supported by the target
-            Python in order of preference (most preferred first).
-        """
-        self._allow_all_prereleases = allow_all_prereleases
-        self._hashes = hashes
-        self._prefer_binary = prefer_binary
-        self._project_name = project_name
-        self._specifier = specifier
-        self._supported_tags = supported_tags
-
-    def get_applicable_candidates(
-        self,
-        candidates,  # type: List[InstallationCandidate]
-    ):
-        # type: (...) -> List[InstallationCandidate]
-        """
-        Return the applicable candidates from a list of candidates.
-        """
-        # Using None infers from the specifier instead.
-        allow_prereleases = self._allow_all_prereleases or None
-        specifier = self._specifier
-        versions = {
-            str(v) for v in specifier.filter(
-                # We turn the version object into a str here because otherwise
-                # when we're debundled but setuptools isn't, Python will see
-                # packaging.version.Version and
-                # pkg_resources._vendor.packaging.version.Version as different
-                # types. This way we'll use a str as a common data interchange
-                # format. If we stop using the pkg_resources provided specifier
-                # and start using our own, we can drop the cast to str().
-                (str(c.version) for c in candidates),
-                prereleases=allow_prereleases,
-            )
-        }
-
-        # Again, converting version to str to deal with debundling.
-        applicable_candidates = [
-            c for c in candidates if str(c.version) in versions
-        ]
-
-        return filter_unallowed_hashes(
-            candidates=applicable_candidates,
-            hashes=self._hashes,
-            project_name=self._project_name,
-        )
-
-    def _sort_key(self, candidate):
-        # type: (InstallationCandidate) -> CandidateSortingKey
-        """
-        Function to pass as the `key` argument to a call to sorted() to sort
-        InstallationCandidates by preference.
-
-        Returns a tuple such that tuples sorting as greater using Python's
-        default comparison operator are more preferred.
-
-        The preference is as follows:
-
-        First and foremost, candidates with allowed (matching) hashes are
-        always preferred over candidates without matching hashes. This is
-        because e.g. if the only candidate with an allowed hash is yanked,
-        we still want to use that candidate.
-
-        Second, excepting hash considerations, candidates that have been
-        yanked (in the sense of PEP 592) are always less preferred than
-        candidates that haven't been yanked. Then:
-
-        If not finding wheels, they are sorted by version only.
-        If finding wheels, then the sort order is by version, then:
-          1. existing installs
-          2. wheels ordered via Wheel.support_index_min(self._supported_tags)
-          3. source archives
-        If prefer_binary was set, then all wheels are sorted above sources.
-
-        Note: it was considered to embed this logic into the Link
-              comparison operators, but then different sdist links
-              with the same version would have to be considered equal
-        """
-        valid_tags = self._supported_tags
-        support_num = len(valid_tags)
-        build_tag = ()  # type: BuildTag
-        binary_preference = 0
-        link = candidate.link
-        if link.is_wheel:
-            # can raise InvalidWheelFilename
-            wheel = Wheel(link.filename)
-            if not wheel.supported(valid_tags):
-                raise UnsupportedWheel(
-                    "%s is not a supported wheel for this platform. It "
-                    "can't be sorted." % wheel.filename
-                )
-            if self._prefer_binary:
-                binary_preference = 1
-            pri = -(wheel.support_index_min(valid_tags))
-            if wheel.build_tag is not None:
-                match = re.match(r'^(\d+)(.*)$', wheel.build_tag)
-                build_tag_groups = match.groups()
-                build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
-        else:  # sdist
-            pri = -(support_num)
-        has_allowed_hash = int(link.is_hash_allowed(self._hashes))
-        yank_value = -1 * int(link.is_yanked)  # -1 for yanked.
-        return (
-            has_allowed_hash, yank_value, binary_preference, candidate.version,
-            build_tag, pri,
-        )
-
-    def sort_best_candidate(
-        self,
-        candidates,    # type: List[InstallationCandidate]
-    ):
-        # type: (...) -> Optional[InstallationCandidate]
-        """
-        Return the best candidate per the instance's sort order, or None if
-        no candidate is acceptable.
-        """
-        if not candidates:
-            return None
-
-        best_candidate = max(candidates, key=self._sort_key)
-
-        # Log a warning per PEP 592 if necessary before returning.
-        link = best_candidate.link
-        if link.is_yanked:
-            reason = link.yanked_reason or ''
-            msg = (
-                # Mark this as a unicode string to prevent
-                # "UnicodeEncodeError: 'ascii' codec can't encode character"
-                # in Python 2 when the reason contains non-ascii characters.
-                u'The candidate selected for download or install is a '
-                'yanked version: {candidate}\n'
-                'Reason for being yanked: {reason}'
-            ).format(candidate=best_candidate, reason=reason)
-            logger.warning(msg)
-
-        return best_candidate
-
-    def compute_best_candidate(
-        self,
-        candidates,      # type: List[InstallationCandidate]
-    ):
-        # type: (...) -> BestCandidateResult
-        """
-        Compute and return a `BestCandidateResult` instance.
-        """
-        applicable_candidates = self.get_applicable_candidates(candidates)
-
-        best_candidate = self.sort_best_candidate(applicable_candidates)
-
-        return BestCandidateResult(
-            candidates,
-            applicable_candidates=applicable_candidates,
-            best_candidate=best_candidate,
-        )
-
-
-class PackageFinder(object):
-    """This finds packages.
-
-    This is meant to match easy_install's technique for looking for
-    packages, by reading pages and looking for appropriate links.
-    """
-
-    def __init__(
-        self,
-        link_collector,       # type: LinkCollector
-        target_python,        # type: TargetPython
-        allow_yanked,         # type: bool
-        format_control=None,  # type: Optional[FormatControl]
-        candidate_prefs=None,         # type: CandidatePreferences
-        ignore_requires_python=None,  # type: Optional[bool]
-    ):
-        # type: (...) -> None
-        """
-        This constructor is primarily meant to be used by the create() class
-        method and from tests.
-
-        :param format_control: A FormatControl object, used to control
-            the selection of source packages / binary packages when consulting
-            the index and links.
-        :param candidate_prefs: Options to use when creating a
-            CandidateEvaluator object.
-        """
-        if candidate_prefs is None:
-            candidate_prefs = CandidatePreferences()
-
-        format_control = format_control or FormatControl(set(), set())
-
-        self._allow_yanked = allow_yanked
-        self._candidate_prefs = candidate_prefs
-        self._ignore_requires_python = ignore_requires_python
-        self._link_collector = link_collector
-        self._target_python = target_python
-
-        self.format_control = format_control
-
-        # These are boring links that have already been logged somehow.
-        self._logged_links = set()  # type: Set[Link]
-
-    # Don't include an allow_yanked default value to make sure each call
-    # site considers whether yanked releases are allowed. This also causes
-    # that decision to be made explicit in the calling code, which helps
-    # people when reading the code.
-    @classmethod
-    def create(
-        cls,
-        link_collector,      # type: LinkCollector
-        selection_prefs,     # type: SelectionPreferences
-        target_python=None,  # type: Optional[TargetPython]
-    ):
-        # type: (...) -> PackageFinder
-        """Create a PackageFinder.
-
-        :param selection_prefs: The candidate selection preferences, as a
-            SelectionPreferences object.
-        :param target_python: The target Python interpreter to use when
-            checking compatibility. If None (the default), a TargetPython
-            object will be constructed from the running Python.
-        """
-        if target_python is None:
-            target_python = TargetPython()
-
-        candidate_prefs = CandidatePreferences(
-            prefer_binary=selection_prefs.prefer_binary,
-            allow_all_prereleases=selection_prefs.allow_all_prereleases,
-        )
-
-        return cls(
-            candidate_prefs=candidate_prefs,
-            link_collector=link_collector,
-            target_python=target_python,
-            allow_yanked=selection_prefs.allow_yanked,
-            format_control=selection_prefs.format_control,
-            ignore_requires_python=selection_prefs.ignore_requires_python,
-        )
-
-    @property
-    def search_scope(self):
-        # type: () -> SearchScope
-        return self._link_collector.search_scope
-
-    @search_scope.setter
-    def search_scope(self, search_scope):
-        # type: (SearchScope) -> None
-        self._link_collector.search_scope = search_scope
-
-    @property
-    def find_links(self):
-        # type: () -> List[str]
-        return self._link_collector.find_links
-
-    @property
-    def index_urls(self):
-        # type: () -> List[str]
-        return self.search_scope.index_urls
-
-    @property
-    def trusted_hosts(self):
-        # type: () -> Iterable[str]
-        for host_port in self._link_collector.session.pip_trusted_origins:
-            yield build_netloc(*host_port)
-
-    @property
-    def allow_all_prereleases(self):
-        # type: () -> bool
-        return self._candidate_prefs.allow_all_prereleases
-
-    def set_allow_all_prereleases(self):
-        # type: () -> None
-        self._candidate_prefs.allow_all_prereleases = True
-
-    def make_link_evaluator(self, project_name):
-        # type: (str) -> LinkEvaluator
-        canonical_name = canonicalize_name(project_name)
-        formats = self.format_control.get_allowed_formats(canonical_name)
-
-        return LinkEvaluator(
-            project_name=project_name,
-            canonical_name=canonical_name,
-            formats=formats,
-            target_python=self._target_python,
-            allow_yanked=self._allow_yanked,
-            ignore_requires_python=self._ignore_requires_python,
-        )
-
-    def _sort_links(self, links):
-        # type: (Iterable[Link]) -> List[Link]
-        """
-        Returns elements of links in order, non-egg links first, egg links
-        second, while eliminating duplicates
-        """
-        eggs, no_eggs = [], []
-        seen = set()  # type: Set[Link]
-        for link in links:
-            if link not in seen:
-                seen.add(link)
-                if link.egg_fragment:
-                    eggs.append(link)
-                else:
-                    no_eggs.append(link)
-        return no_eggs + eggs
-
-    def _log_skipped_link(self, link, reason):
-        # type: (Link, Text) -> None
-        if link not in self._logged_links:
-            # Mark this as a unicode string to prevent "UnicodeEncodeError:
-            # 'ascii' codec can't encode character" in Python 2 when
-            # the reason contains non-ascii characters.
-            #   Also, put the link at the end so the reason is more visible
-            # and because the link string is usually very long.
-            logger.debug(u'Skipping link: %s: %s', reason, link)
-            self._logged_links.add(link)
-
-    def get_install_candidate(self, link_evaluator, link):
-        # type: (LinkEvaluator, Link) -> Optional[InstallationCandidate]
-        """
-        If the link is a candidate for install, convert it to an
-        InstallationCandidate and return it. Otherwise, return None.
-        """
-        is_candidate, result = link_evaluator.evaluate_link(link)
-        if not is_candidate:
-            if result:
-                self._log_skipped_link(link, reason=result)
-            return None
-
-        return InstallationCandidate(
-            project=link_evaluator.project_name,
-            link=link,
-            # Convert the Text result to str since InstallationCandidate
-            # accepts str.
-            version=str(result),
-        )
-
-    def evaluate_links(self, link_evaluator, links):
-        # type: (LinkEvaluator, Iterable[Link]) -> List[InstallationCandidate]
-        """
-        Convert links that are candidates to InstallationCandidate objects.
-        """
-        candidates = []
-        for link in self._sort_links(links):
-            candidate = self.get_install_candidate(link_evaluator, link)
-            if candidate is not None:
-                candidates.append(candidate)
-
-        return candidates
-
-    def find_all_candidates(self, project_name):
-        # type: (str) -> List[InstallationCandidate]
-        """Find all available InstallationCandidate for project_name
-
-        This checks index_urls and find_links.
-        All versions found are returned as an InstallationCandidate list.
-
-        See LinkEvaluator.evaluate_link() for details on which files
-        are accepted.
-        """
-        collected_links = self._link_collector.collect_links(project_name)
-
-        link_evaluator = self.make_link_evaluator(project_name)
-
-        find_links_versions = self.evaluate_links(
-            link_evaluator,
-            links=collected_links.find_links,
-        )
-
-        page_versions = []
-        for page_url, page_links in collected_links.pages.items():
-            logger.debug('Analyzing links from page %s', page_url)
-            with indent_log():
-                new_versions = self.evaluate_links(
-                    link_evaluator,
-                    links=page_links,
-                )
-                page_versions.extend(new_versions)
-
-        file_versions = self.evaluate_links(
-            link_evaluator,
-            links=collected_links.files,
-        )
-        if file_versions:
-            file_versions.sort(reverse=True)
-            logger.debug(
-                'Local files found: %s',
-                ', '.join([
-                    url_to_path(candidate.link.url)
-                    for candidate in file_versions
-                ])
-            )
-
-        # This is an intentional priority ordering
-        return file_versions + find_links_versions + page_versions
-
-    def make_candidate_evaluator(
-        self,
-        project_name,    # type: str
-        specifier=None,  # type: Optional[specifiers.BaseSpecifier]
-        hashes=None,     # type: Optional[Hashes]
-    ):
-        # type: (...) -> CandidateEvaluator
-        """Create a CandidateEvaluator object to use.
-        """
-        candidate_prefs = self._candidate_prefs
-        return CandidateEvaluator.create(
-            project_name=project_name,
-            target_python=self._target_python,
-            prefer_binary=candidate_prefs.prefer_binary,
-            allow_all_prereleases=candidate_prefs.allow_all_prereleases,
-            specifier=specifier,
-            hashes=hashes,
-        )
-
-    def find_best_candidate(
-        self,
-        project_name,       # type: str
-        specifier=None,     # type: Optional[specifiers.BaseSpecifier]
-        hashes=None,        # type: Optional[Hashes]
-    ):
-        # type: (...) -> BestCandidateResult
-        """Find matches for the given project and specifier.
-
-        :param specifier: An optional object implementing `filter`
-            (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
-            versions.
-
-        :return: A `BestCandidateResult` instance.
-        """
-        candidates = self.find_all_candidates(project_name)
-        candidate_evaluator = self.make_candidate_evaluator(
-            project_name=project_name,
-            specifier=specifier,
-            hashes=hashes,
-        )
-        return candidate_evaluator.compute_best_candidate(candidates)
-
-    def find_requirement(self, req, upgrade):
-        # type: (InstallRequirement, bool) -> Optional[Link]
-        """Try to find a Link matching req
-
-        Expects req, an InstallRequirement and upgrade, a boolean
-        Returns a Link if found,
-        Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
-        """
-        hashes = req.hashes(trust_internet=False)
-        best_candidate_result = self.find_best_candidate(
-            req.name, specifier=req.specifier, hashes=hashes,
-        )
-        best_candidate = best_candidate_result.best_candidate
-
-        installed_version = None    # type: Optional[_BaseVersion]
-        if req.satisfied_by is not None:
-            installed_version = parse_version(req.satisfied_by.version)
-
-        def _format_versions(cand_iter):
-            # This repeated parse_version and str() conversion is needed to
-            # handle different vendoring sources from pip and pkg_resources.
-            # If we stop using the pkg_resources provided specifier and start
-            # using our own, we can drop the cast to str().
-            return ", ".join(sorted(
-                {str(c.version) for c in cand_iter},
-                key=parse_version,
-            )) or "none"
-
-        if installed_version is None and best_candidate is None:
-            logger.critical(
-                'Could not find a version that satisfies the requirement %s '
-                '(from versions: %s)',
-                req,
-                _format_versions(best_candidate_result.iter_all()),
-            )
-
-            raise DistributionNotFound(
-                'No matching distribution found for %s' % req
-            )
-
-        best_installed = False
-        if installed_version and (
-                best_candidate is None or
-                best_candidate.version <= installed_version):
-            best_installed = True
-
-        if not upgrade and installed_version is not None:
-            if best_installed:
-                logger.debug(
-                    'Existing installed version (%s) is most up-to-date and '
-                    'satisfies requirement',
-                    installed_version,
-                )
-            else:
-                logger.debug(
-                    'Existing installed version (%s) satisfies requirement '
-                    '(most up-to-date version is %s)',
-                    installed_version,
-                    best_candidate.version,
-                )
-            return None
-
-        if best_installed:
-            # We have an existing version, and its the best version
-            logger.debug(
-                'Installed version (%s) is most up-to-date (past versions: '
-                '%s)',
-                installed_version,
-                _format_versions(best_candidate_result.iter_applicable()),
-            )
-            raise BestVersionAlreadyInstalled
-
-        logger.debug(
-            'Using version %s (newest of versions: %s)',
-            best_candidate.version,
-            _format_versions(best_candidate_result.iter_applicable()),
-        )
-        return best_candidate.link
-
-
-def _find_name_version_sep(fragment, canonical_name):
-    # type: (str, str) -> int
-    """Find the separator's index based on the package's canonical name.
-
-    :param fragment: A <package>+<version> filename "fragment" (stem) or
-        egg fragment.
-    :param canonical_name: The package's canonical name.
-
-    This function is needed since the canonicalized name does not necessarily
-    have the same length as the egg info's name part. An example::
-
-    >>> fragment = 'foo__bar-1.0'
-    >>> canonical_name = 'foo-bar'
-    >>> _find_name_version_sep(fragment, canonical_name)
-    8
-    """
-    # Project name and version must be separated by one single dash. Find all
-    # occurrences of dashes; if the string in front of it matches the canonical
-    # name, this is the one separating the name and version parts.
-    for i, c in enumerate(fragment):
-        if c != "-":
-            continue
-        if canonicalize_name(fragment[:i]) == canonical_name:
-            return i
-    raise ValueError("{} does not match {}".format(fragment, canonical_name))
-
-
-def _extract_version_from_fragment(fragment, canonical_name):
-    # type: (str, str) -> Optional[str]
-    """Parse the version string from a + filename
-    "fragment" (stem) or egg fragment.
-
-    :param fragment: The string to parse. E.g. foo-2.1
-    :param canonical_name: The canonicalized name of the package this
-        belongs to.
-    """
-    try:
-        version_start = _find_name_version_sep(fragment, canonical_name) + 1
-    except ValueError:
-        return None
-    version = fragment[version_start:]
-    if not version:
-        return None
-    return version
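The helper pair above (`_find_name_version_sep` / `_extract_version_from_fragment`) splits a `<name>-<version>` stem by scanning for the dash whose prefix canonicalizes to the project name. A minimal standalone sketch of the same idea, using the PEP 503 normalization rule directly instead of pip's vendored `canonicalize_name` (names here are illustrative):

    import re

    def canonicalize(name):
        # PEP 503 normalization: runs of '-', '_', '.' collapse to one dash, lowercased.
        return re.sub(r"[-_.]+", "-", name).lower()

    def extract_version(fragment, canonical_name):
        # Scan every dash; the first prefix that canonicalizes to the project
        # name is the separator between name and version.
        for i, ch in enumerate(fragment):
            if ch == "-" and canonicalize(fragment[:i]) == canonical_name:
                return fragment[i + 1:] or None
        return None

    print(extract_version("foo__bar-1.0", "foo-bar"))  # -> '1.0'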
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/legacy_resolve.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/legacy_resolve.py
deleted file mode 100644
index c24158f..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/legacy_resolve.py
+++ /dev/null
@@ -1,460 +0,0 @@
-"""Dependency Resolution
-
-The dependency resolution in pip is performed as follows:
-
-for top-level requirements:
-    a. only one spec allowed per project, regardless of conflicts or not.
-       otherwise a "double requirement" exception is raised
-    b. they override sub-dependency requirements.
-for sub-dependencies
-    a. "first found, wins" (where the order is breadth first)
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
-
-import logging
-import sys
-from collections import defaultdict
-from itertools import chain
-
-from pip._vendor.packaging import specifiers
-
-from pip._internal.exceptions import (
-    BestVersionAlreadyInstalled,
-    DistributionNotFound,
-    HashError,
-    HashErrors,
-    UnsupportedPythonVersion,
-)
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import (
-    dist_in_usersite,
-    ensure_dir,
-    normalize_version_info,
-)
-from pip._internal.utils.packaging import (
-    check_requires_python,
-    get_requires_python,
-)
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import Callable, DefaultDict, List, Optional, Set, Tuple
-    from pip._vendor import pkg_resources
-
-    from pip._internal.distributions import AbstractDistribution
-    from pip._internal.network.session import PipSession
-    from pip._internal.index import PackageFinder
-    from pip._internal.operations.prepare import RequirementPreparer
-    from pip._internal.req.req_install import InstallRequirement
-    from pip._internal.req.req_set import RequirementSet
-
-    InstallRequirementProvider = Callable[
-        [str, InstallRequirement], InstallRequirement
-    ]
-
-logger = logging.getLogger(__name__)
-
-
-def _check_dist_requires_python(
-    dist,  # type: pkg_resources.Distribution
-    version_info,  # type: Tuple[int, int, int]
-    ignore_requires_python=False,  # type: bool
-):
-    # type: (...) -> None
-    """
-    Check whether the given Python version is compatible with a distribution's
-    "Requires-Python" value.
-
-    :param version_info: A 3-tuple of ints representing the Python
-        major-minor-micro version to check.
-    :param ignore_requires_python: Whether to ignore the "Requires-Python"
-        value if the given Python version isn't compatible.
-
-    :raises UnsupportedPythonVersion: When the given Python version isn't
-        compatible.
-    """
-    requires_python = get_requires_python(dist)
-    try:
-        is_compatible = check_requires_python(
-            requires_python, version_info=version_info,
-        )
-    except specifiers.InvalidSpecifier as exc:
-        logger.warning(
-            "Package %r has an invalid Requires-Python: %s",
-            dist.project_name, exc,
-        )
-        return
-
-    if is_compatible:
-        return
-
-    version = '.'.join(map(str, version_info))
-    if ignore_requires_python:
-        logger.debug(
-            'Ignoring failed Requires-Python check for package %r: '
-            '%s not in %r',
-            dist.project_name, version, requires_python,
-        )
-        return
-
-    raise UnsupportedPythonVersion(
-        'Package {!r} requires a different Python: {} not in {!r}'.format(
-            dist.project_name, version, requires_python,
-        ))
-
-
-class Resolver(object):
-    """Resolves which packages need to be installed/uninstalled to perform \
-    the requested operation without breaking the requirements of any package.
-    """
-
-    _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
-
-    def __init__(
-        self,
-        preparer,  # type: RequirementPreparer
-        session,  # type: PipSession
-        finder,  # type: PackageFinder
-        make_install_req,  # type: InstallRequirementProvider
-        use_user_site,  # type: bool
-        ignore_dependencies,  # type: bool
-        ignore_installed,  # type: bool
-        ignore_requires_python,  # type: bool
-        force_reinstall,  # type: bool
-        upgrade_strategy,  # type: str
-        py_version_info=None,  # type: Optional[Tuple[int, ...]]
-    ):
-        # type: (...) -> None
-        super(Resolver, self).__init__()
-        assert upgrade_strategy in self._allowed_strategies
-
-        if py_version_info is None:
-            py_version_info = sys.version_info[:3]
-        else:
-            py_version_info = normalize_version_info(py_version_info)
-
-        self._py_version_info = py_version_info
-
-        self.preparer = preparer
-        self.finder = finder
-        self.session = session
-
-        # This is set in resolve
-        self.require_hashes = None  # type: Optional[bool]
-
-        self.upgrade_strategy = upgrade_strategy
-        self.force_reinstall = force_reinstall
-        self.ignore_dependencies = ignore_dependencies
-        self.ignore_installed = ignore_installed
-        self.ignore_requires_python = ignore_requires_python
-        self.use_user_site = use_user_site
-        self._make_install_req = make_install_req
-
-        self._discovered_dependencies = \
-            defaultdict(list)  # type: DefaultDict[str, List]
-
-    def resolve(self, requirement_set):
-        # type: (RequirementSet) -> None
-        """Resolve what operations need to be done
-
-        As a side-effect of this method, the packages (and their dependencies)
-        are downloaded, unpacked and prepared for installation. This
-        preparation is done by ``pip.operations.prepare``.
-
-        Once PyPI has static dependency metadata available, it would be
-        possible to move the preparation to become a step separated from
-        dependency resolution.
-        """
-        # make the wheelhouse
-        if self.preparer.wheel_download_dir:
-            ensure_dir(self.preparer.wheel_download_dir)
-
-        # If any top-level requirement has a hash specified, enter
-        # hash-checking mode, which requires hashes from all.
-        root_reqs = (
-            requirement_set.unnamed_requirements +
-            list(requirement_set.requirements.values())
-        )
-        self.require_hashes = (
-            requirement_set.require_hashes or
-            any(req.has_hash_options for req in root_reqs)
-        )
-
-        # Display where finder is looking for packages
-        search_scope = self.finder.search_scope
-        locations = search_scope.get_formatted_locations()
-        if locations:
-            logger.info(locations)
-
-        # Actually prepare the files, and collect any exceptions. Most hash
-        # exceptions cannot be checked ahead of time, because
-        # req.populate_link() needs to be called before we can make decisions
-        # based on link type.
-        discovered_reqs = []  # type: List[InstallRequirement]
-        hash_errors = HashErrors()
-        for req in chain(root_reqs, discovered_reqs):
-            try:
-                discovered_reqs.extend(
-                    self._resolve_one(requirement_set, req)
-                )
-            except HashError as exc:
-                exc.req = req
-                hash_errors.append(exc)
-
-        if hash_errors:
-            raise hash_errors
-
-    def _is_upgrade_allowed(self, req):
-        # type: (InstallRequirement) -> bool
-        if self.upgrade_strategy == "to-satisfy-only":
-            return False
-        elif self.upgrade_strategy == "eager":
-            return True
-        else:
-            assert self.upgrade_strategy == "only-if-needed"
-            return req.is_direct
-
-    def _set_req_to_reinstall(self, req):
-        # type: (InstallRequirement) -> None
-        """
-        Set a requirement to be installed.
-        """
-        # Don't uninstall the conflict if doing a user install and the
-        # conflict is not a user install.
-        if not self.use_user_site or dist_in_usersite(req.satisfied_by):
-            req.conflicts_with = req.satisfied_by
-        req.satisfied_by = None
-
-    def _check_skip_installed(self, req_to_install):
-        # type: (InstallRequirement) -> Optional[str]
-        """Check if req_to_install should be skipped.
-
-        This will check if the req is installed, and whether we should upgrade
-        or reinstall it, taking into account all the relevant user options.
-
-        After calling this req_to_install will only have satisfied_by set to
-        None if the req_to_install is to be upgraded/reinstalled etc. Any
-        other value will be a dist recording the current thing installed that
-        satisfies the requirement.
-
-        Note that for vcs urls and the like we can't assess skipping in this
-        routine - we simply identify that we need to pull the thing down,
-        then later on it is pulled down and introspected to assess upgrade/
-        reinstalls etc.
-
-        :return: A text reason for why it was skipped, or None.
-        """
-        if self.ignore_installed:
-            return None
-
-        req_to_install.check_if_exists(self.use_user_site)
-        if not req_to_install.satisfied_by:
-            return None
-
-        if self.force_reinstall:
-            self._set_req_to_reinstall(req_to_install)
-            return None
-
-        if not self._is_upgrade_allowed(req_to_install):
-            if self.upgrade_strategy == "only-if-needed":
-                return 'already satisfied, skipping upgrade'
-            return 'already satisfied'
-
-        # Check for the possibility of an upgrade.  For link-based
-        # requirements we have to pull the tree down and inspect to assess
-        # the version #, so it's handled way down.
-        if not req_to_install.link:
-            try:
-                self.finder.find_requirement(req_to_install, upgrade=True)
-            except BestVersionAlreadyInstalled:
-                # Then the best version is installed.
-                return 'already up-to-date'
-            except DistributionNotFound:
-                # No distribution found, so we squash the error.  It will
-                # be raised later when we re-try later to do the install.
-                # Why don't we just raise here?
-                pass
-
-        self._set_req_to_reinstall(req_to_install)
-        return None
-
-    def _get_abstract_dist_for(self, req):
-        # type: (InstallRequirement) -> AbstractDistribution
-        """Takes a InstallRequirement and returns a single AbstractDist \
-        representing a prepared variant of the same.
-        """
-        assert self.require_hashes is not None, (
-            "require_hashes should have been set in Resolver.resolve()"
-        )
-
-        if req.editable:
-            return self.preparer.prepare_editable_requirement(
-                req, self.require_hashes, self.use_user_site, self.finder,
-            )
-
-        # satisfied_by is only evaluated by calling _check_skip_installed,
-        # so it must be None here.
-        assert req.satisfied_by is None
-        skip_reason = self._check_skip_installed(req)
-
-        if req.satisfied_by:
-            return self.preparer.prepare_installed_requirement(
-                req, self.require_hashes, skip_reason
-            )
-
-        upgrade_allowed = self._is_upgrade_allowed(req)
-
-        # We eagerly populate the link, since that's our "legacy" behavior.
-        req.populate_link(self.finder, upgrade_allowed, self.require_hashes)
-        abstract_dist = self.preparer.prepare_linked_requirement(
-            req, self.session, self.finder, self.require_hashes
-        )
-
-        # NOTE
-        # The following portion is for determining if a certain package is
-        # going to be re-installed/upgraded or not and reporting to the user.
-        # This should probably get cleaned up in a future refactor.
-
-        # req.req is only avail after unpack for URL
-        # pkgs repeat check_if_exists to uninstall-on-upgrade
-        # (#14)
-        if not self.ignore_installed:
-            req.check_if_exists(self.use_user_site)
-
-        if req.satisfied_by:
-            should_modify = (
-                self.upgrade_strategy != "to-satisfy-only" or
-                self.force_reinstall or
-                self.ignore_installed or
-                req.link.scheme == 'file'
-            )
-            if should_modify:
-                self._set_req_to_reinstall(req)
-            else:
-                logger.info(
-                    'Requirement already satisfied (use --upgrade to upgrade):'
-                    ' %s', req,
-                )
-
-        return abstract_dist
-
-    def _resolve_one(
-        self,
-        requirement_set,  # type: RequirementSet
-        req_to_install  # type: InstallRequirement
-    ):
-        # type: (...) -> List[InstallRequirement]
-        """Prepare a single requirements file.
-
-        :return: A list of additional InstallRequirements to also install.
-        """
-        # Tell user what we are doing for this requirement:
-        # obtain (editable), skipping, processing (local url), collecting
-        # (remote url or package name)
-        if req_to_install.constraint or req_to_install.prepared:
-            return []
-
-        req_to_install.prepared = True
-
-        # register tmp src for cleanup in case something goes wrong
-        requirement_set.reqs_to_cleanup.append(req_to_install)
-
-        abstract_dist = self._get_abstract_dist_for(req_to_install)
-
-        # Parse and return dependencies
-        dist = abstract_dist.get_pkg_resources_distribution()
-        # This will raise UnsupportedPythonVersion if the given Python
-        # version isn't compatible with the distribution's Requires-Python.
-        _check_dist_requires_python(
-            dist, version_info=self._py_version_info,
-            ignore_requires_python=self.ignore_requires_python,
-        )
-
-        more_reqs = []  # type: List[InstallRequirement]
-
-        def add_req(subreq, extras_requested):
-            sub_install_req = self._make_install_req(
-                str(subreq),
-                req_to_install,
-            )
-            parent_req_name = req_to_install.name
-            to_scan_again, add_to_parent = requirement_set.add_requirement(
-                sub_install_req,
-                parent_req_name=parent_req_name,
-                extras_requested=extras_requested,
-            )
-            if parent_req_name and add_to_parent:
-                self._discovered_dependencies[parent_req_name].append(
-                    add_to_parent
-                )
-            more_reqs.extend(to_scan_again)
-
-        with indent_log():
-            # We add req_to_install before its dependencies, so that we
-            # can refer to it when adding dependencies.
-            if not requirement_set.has_requirement(req_to_install.name):
-                # 'unnamed' requirements will get added here
-                req_to_install.is_direct = True
-                requirement_set.add_requirement(
-                    req_to_install, parent_req_name=None,
-                )
-
-            if not self.ignore_dependencies:
-                if req_to_install.extras:
-                    logger.debug(
-                        "Installing extra requirements: %r",
-                        ','.join(req_to_install.extras),
-                    )
-                missing_requested = sorted(
-                    set(req_to_install.extras) - set(dist.extras)
-                )
-                for missing in missing_requested:
-                    logger.warning(
-                        '%s does not provide the extra \'%s\'',
-                        dist, missing
-                    )
-
-                available_requested = sorted(
-                    set(dist.extras) & set(req_to_install.extras)
-                )
-                for subreq in dist.requires(available_requested):
-                    add_req(subreq, extras_requested=available_requested)
-
-            if not req_to_install.editable and not req_to_install.satisfied_by:
-                # XXX: --no-install leads this to report 'Successfully
-                # downloaded' for only non-editable reqs, even though we took
-                # action on them.
-                requirement_set.successfully_downloaded.append(req_to_install)
-
-        return more_reqs
-
-    def get_installation_order(self, req_set):
-        # type: (RequirementSet) -> List[InstallRequirement]
-        """Create the installation order.
-
-        The installation order is topological - requirements are installed
-        before the requiring thing. We break cycles at an arbitrary point,
-        and make no other guarantees.
-        """
-        # The current implementation, which we may change at any point
-        # installs the user specified things in the order given, except when
-        # dependencies must come earlier to achieve topological order.
-        order = []
-        ordered_reqs = set()  # type: Set[InstallRequirement]
-
-        def schedule(req):
-            if req.satisfied_by or req in ordered_reqs:
-                return
-            if req.constraint:
-                return
-            ordered_reqs.add(req)
-            for dep in self._discovered_dependencies[req.name]:
-                schedule(dep)
-            order.append(req)
-
-        for install_req in req_set.requirements.values():
-            schedule(install_req)
-        return order
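`get_installation_order` above is a depth-first post-order walk over the dependencies discovered during resolution, with a `seen` set breaking cycles. A self-contained sketch of that scheduling (package names are hypothetical):

    from collections import defaultdict

    def installation_order(requested, dependencies):
        # requested: requirement names in the order the user gave them.
        # dependencies: name -> list of dependency names found during resolution.
        order, seen = [], set()

        def schedule(name):
            if name in seen:        # breaks cycles, skips duplicates
                return
            seen.add(name)
            for dep in dependencies.get(name, []):
                schedule(dep)       # dependencies first (post-order)
            order.append(name)

        for name in requested:
            schedule(name)
        return order

    deps = defaultdict(list, {"app": ["requests"], "requests": ["urllib3", "idna"]})
    print(installation_order(["app"], deps))
    # -> ['urllib3', 'idna', 'requests', 'app']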
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/locations.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/locations.py
deleted file mode 100644
index 1899c7d..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/locations.py
+++ /dev/null
@@ -1,156 +0,0 @@
-"""Locations where we look for configs, install stuff, etc"""
-
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import os
-import os.path
-import platform
-import site
-import sys
-import sysconfig
-from distutils import sysconfig as distutils_sysconfig
-from distutils.command.install import SCHEME_KEYS  # type: ignore
-
-from pip._internal.utils import appdirs
-from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.virtualenv import running_under_virtualenv
-
-if MYPY_CHECK_RUNNING:
-    from typing import Any, Union, Dict, List, Optional
-
-
-# Application Directories
-USER_CACHE_DIR = appdirs.user_cache_dir("pip")
-
-
-def get_major_minor_version():
-    # type: () -> str
-    """
-    Return the major-minor version of the current Python as a string, e.g.
-    "3.7" or "3.10".
-    """
-    return '{}.{}'.format(*sys.version_info)
-
-
-def get_src_prefix():
-    if running_under_virtualenv():
-        src_prefix = os.path.join(sys.prefix, 'src')
-    else:
-        # FIXME: keep src in cwd for now (it is not a temporary folder)
-        try:
-            src_prefix = os.path.join(os.getcwd(), 'src')
-        except OSError:
-            # In case the current working directory has been renamed or deleted
-            sys.exit(
-                "The folder you are executing pip from can no longer be found."
-            )
-
-    # under macOS + virtualenv sys.prefix is not properly resolved
-    # it is something like /path/to/python/bin/..
-    return os.path.abspath(src_prefix)
-
-
-# FIXME doesn't account for venv linked to global site-packages
-
-site_packages = sysconfig.get_path("purelib")  # type: Optional[str]
-
-# This is because of a bug in PyPy's sysconfig module, see
-# https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths
-# for more information.
-if platform.python_implementation().lower() == "pypy":
-    site_packages = distutils_sysconfig.get_python_lib()
-try:
-    # Use getusersitepackages if this is present, as it ensures that the
-    # value is initialised properly.
-    user_site = site.getusersitepackages()
-except AttributeError:
-    user_site = site.USER_SITE
-
-if WINDOWS:
-    bin_py = os.path.join(sys.prefix, 'Scripts')
-    bin_user = os.path.join(user_site, 'Scripts')
-    # buildout uses 'bin' on Windows too?
-    if not os.path.exists(bin_py):
-        bin_py = os.path.join(sys.prefix, 'bin')
-        bin_user = os.path.join(user_site, 'bin')
-else:
-    bin_py = os.path.join(sys.prefix, 'bin')
-    bin_user = os.path.join(user_site, 'bin')
-
-    # Forcing to use /usr/local/bin for standard macOS framework installs
-    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
-    if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
-        bin_py = '/usr/local/bin'
-
-
-def distutils_scheme(dist_name, user=False, home=None, root=None,
-                     isolated=False, prefix=None):
-    # type:(str, bool, str, str, bool, str) -> dict
-    """
-    Return a distutils install scheme
-    """
-    from distutils.dist import Distribution
-
-    scheme = {}
-
-    if isolated:
-        extra_dist_args = {"script_args": ["--no-user-cfg"]}
-    else:
-        extra_dist_args = {}
-    dist_args = {'name': dist_name}  # type: Dict[str, Union[str, List[str]]]
-    dist_args.update(extra_dist_args)
-
-    d = Distribution(dist_args)
-    # Ignoring, typeshed issue reported python/typeshed/issues/2567
-    d.parse_config_files()
-    # NOTE: Ignoring type since mypy can't find attributes on 'Command'
-    i = d.get_command_obj('install', create=True)  # type: Any
-    assert i is not None
-    # NOTE: setting user or home has the side-effect of creating the home dir
-    # or user base for installations during finalize_options()
-    # ideally, we'd prefer a scheme class that has no side-effects.
-    assert not (user and prefix), "user={} prefix={}".format(user, prefix)
-    assert not (home and prefix), "home={} prefix={}".format(home, prefix)
-    i.user = user or i.user
-    if user or home:
-        i.prefix = ""
-    i.prefix = prefix or i.prefix
-    i.home = home or i.home
-    i.root = root or i.root
-    i.finalize_options()
-    for key in SCHEME_KEYS:
-        scheme[key] = getattr(i, 'install_' + key)
-
-    # install_lib specified in setup.cfg should install *everything*
-    # into there (i.e. it takes precedence over both purelib and
-    # platlib).  Note, i.install_lib is *always* set after
-    # finalize_options(); we only want to override here if the user
-    # has explicitly requested it hence going back to the config
-
-    # Ignoring, typeshed issue reported python/typeshed/issues/2567
-    if 'install_lib' in d.get_option_dict('install'):  # type: ignore
-        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
-
-    if running_under_virtualenv():
-        scheme['headers'] = os.path.join(
-            sys.prefix,
-            'include',
-            'site',
-            'python{}'.format(get_major_minor_version()),
-            dist_name,
-        )
-
-        if root is not None:
-            path_no_drive = os.path.splitdrive(
-                os.path.abspath(scheme["headers"]))[1]
-            scheme["headers"] = os.path.join(
-                root,
-                path_no_drive[1:],
-            )
-
-    return scheme
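`distutils_scheme` above asks distutils' `install` command for the purelib/platlib/headers/scripts/data paths. For a quick look at what such a scheme contains, the standard-library `sysconfig` module exposes comparable paths; this is only an illustration, not the code path pip takes:

    import sysconfig

    # Default scheme for the running interpreter; keys include 'purelib',
    # 'platlib', 'scripts', 'include' and 'data'.
    paths = sysconfig.get_paths()
    for key in ("purelib", "platlib", "scripts", "data"):
        print(key, "->", paths[key])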
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/main.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/main.py
deleted file mode 100644
index 1e92240..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/main.py
+++ /dev/null
@@ -1,47 +0,0 @@
-"""Primary application entrypoint.
-"""
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import locale
-import logging
-import os
-import sys
-
-from pip._internal.cli.autocompletion import autocomplete
-from pip._internal.cli.main_parser import parse_command
-from pip._internal.commands import create_command
-from pip._internal.exceptions import PipError
-from pip._internal.utils import deprecation
-
-logger = logging.getLogger(__name__)
-
-
-def main(args=None):
-    if args is None:
-        args = sys.argv[1:]
-
-    # Configure our deprecation warnings to be sent through loggers
-    deprecation.install_warning_logger()
-
-    autocomplete()
-
-    try:
-        cmd_name, cmd_args = parse_command(args)
-    except PipError as exc:
-        sys.stderr.write("ERROR: %s" % exc)
-        sys.stderr.write(os.linesep)
-        sys.exit(1)
-
-    # Needed for locale.getpreferredencoding(False) to work
-    # in pip._internal.utils.encoding.auto_decode
-    try:
-        locale.setlocale(locale.LC_ALL, '')
-    except locale.Error as e:
-        # setlocale can apparently crash if locale are uninitialized
-        logger.debug("Ignoring error %s when setting locale", e)
-    command = create_command(cmd_name, isolated=("--isolated" in cmd_args))
-
-    return command.main(cmd_args)
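`main.py` above wires argument parsing to a command object inside pip's own process. Driving pip from another program is usually done through a subprocess of the same interpreter instead, e.g.:

    import subprocess
    import sys

    # Run the installed pip under the current interpreter rather than
    # importing pip._internal directly.
    subprocess.check_call([sys.executable, "-m", "pip", "--version"])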
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/__init__.py
deleted file mode 100644
index 7855226..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-"""A package that contains models that represent entities.
-"""
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/candidate.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/candidate.py
deleted file mode 100644
index 4d49604..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/candidate.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from pip._vendor.packaging.version import parse as parse_version
-
-from pip._internal.utils.models import KeyBasedCompareMixin
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from pip._vendor.packaging.version import _BaseVersion
-    from pip._internal.models.link import Link
-    from typing import Any
-
-
-class InstallationCandidate(KeyBasedCompareMixin):
-    """Represents a potential "candidate" for installation.
-    """
-
-    def __init__(self, project, version, link):
-        # type: (Any, str, Link) -> None
-        self.project = project
-        self.version = parse_version(version)  # type: _BaseVersion
-        self.link = link
-
-        super(InstallationCandidate, self).__init__(
-            key=(self.project, self.version, self.link),
-            defining_class=InstallationCandidate
-        )
-
-    def __repr__(self):
-        # type: () -> str
-        return "".format(
-            self.project, self.version, self.link,
-        )
-
-    def __str__(self):
-        return '{!r} candidate (version {} at {})'.format(
-            self.project, self.version, self.link,
-        )
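`InstallationCandidate` above parses the version string so that candidates compare by version semantics rather than lexicographically. A toy illustration of why that matters (the simple tuple parse below stands in for the vendored `packaging.version` parser and only handles plain X.Y.Z strings):

    def parse_version(v):
        # Good enough for plain numeric versions; real pip uses
        # packaging.version, which also understands pre-releases etc.
        return tuple(int(part) for part in v.split("."))

    candidates = ["1.9.0", "1.10.0", "1.2.0"]
    print(sorted(candidates))                     # string order: ['1.10.0', '1.2.0', '1.9.0']
    print(sorted(candidates, key=parse_version))  # version order: ['1.2.0', '1.9.0', '1.10.0']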
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/format_control.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/format_control.py
deleted file mode 100644
index 5489b51..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/format_control.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
-
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.exceptions import CommandError
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import Optional, Set, FrozenSet
-
-
-class FormatControl(object):
-    """Helper for managing formats from which a package can be installed.
-    """
-
-    def __init__(self, no_binary=None, only_binary=None):
-        # type: (Optional[Set], Optional[Set]) -> None
-        if no_binary is None:
-            no_binary = set()
-        if only_binary is None:
-            only_binary = set()
-
-        self.no_binary = no_binary
-        self.only_binary = only_binary
-
-    def __eq__(self, other):
-        return self.__dict__ == other.__dict__
-
-    def __ne__(self, other):
-        return not self.__eq__(other)
-
-    def __repr__(self):
-        return "{}({}, {})".format(
-            self.__class__.__name__,
-            self.no_binary,
-            self.only_binary
-        )
-
-    @staticmethod
-    def handle_mutual_excludes(value, target, other):
-        # type: (str, Optional[Set], Optional[Set]) -> None
-        if value.startswith('-'):
-            raise CommandError(
-                "--no-binary / --only-binary option requires 1 argument."
-            )
-        new = value.split(',')
-        while ':all:' in new:
-            other.clear()
-            target.clear()
-            target.add(':all:')
-            del new[:new.index(':all:') + 1]
-            # Without a none, we want to discard everything as :all: covers it
-            if ':none:' not in new:
-                return
-        for name in new:
-            if name == ':none:':
-                target.clear()
-                continue
-            name = canonicalize_name(name)
-            other.discard(name)
-            target.add(name)
-
-    def get_allowed_formats(self, canonical_name):
-        # type: (str) -> FrozenSet
-        result = {"binary", "source"}
-        if canonical_name in self.only_binary:
-            result.discard('source')
-        elif canonical_name in self.no_binary:
-            result.discard('binary')
-        elif ':all:' in self.only_binary:
-            result.discard('source')
-        elif ':all:' in self.no_binary:
-            result.discard('binary')
-        return frozenset(result)
-
-    def disallow_binaries(self):
-        # type: () -> None
-        self.handle_mutual_excludes(
-            ':all:', self.no_binary, self.only_binary,
-        )
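`FormatControl.get_allowed_formats` above resolves `--no-binary` / `--only-binary` settings with per-project entries taking precedence over the `:all:` wildcard. A standalone sketch of that precedence (project names are made up):

    def allowed_formats(name, no_binary, only_binary):
        # Mirrors the ordering in get_allowed_formats: an explicit project
        # entry wins over a ':all:' wildcard in the opposite set.
        result = {"binary", "source"}
        if name in only_binary:
            result.discard("source")
        elif name in no_binary:
            result.discard("binary")
        elif ":all:" in only_binary:
            result.discard("source")
        elif ":all:" in no_binary:
            result.discard("binary")
        return frozenset(result)

    print(allowed_formats("numpy", no_binary={":all:"}, only_binary={"numpy"}))
    # -> frozenset({'binary'})
    print(allowed_formats("requests", no_binary={":all:"}, only_binary={"numpy"}))
    # -> frozenset({'source'})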
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/index.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/index.py
deleted file mode 100644
index ead1efb..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/index.py
+++ /dev/null
@@ -1,31 +0,0 @@
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-
-class PackageIndex(object):
-    """Represents a Package Index and provides easier access to endpoints
-    """
-
-    def __init__(self, url, file_storage_domain):
-        # type: (str, str) -> None
-        super(PackageIndex, self).__init__()
-        self.url = url
-        self.netloc = urllib_parse.urlsplit(url).netloc
-        self.simple_url = self._url_for_path('simple')
-        self.pypi_url = self._url_for_path('pypi')
-
-        # This is part of a temporary hack used to block installs of PyPI
-        # packages which depend on external urls only necessary until PyPI can
-        # block such packages themselves
-        self.file_storage_domain = file_storage_domain
-
-    def _url_for_path(self, path):
-        # type: (str) -> str
-        return urllib_parse.urljoin(self.url, path)
-
-
-PyPI = PackageIndex(
-    'https://pypi.org/', file_storage_domain='files.pythonhosted.org'
-)
-TestPyPI = PackageIndex(
-    'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org'
-)
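`PackageIndex._url_for_path` above is a plain `urljoin` against the index root. A quick sketch of the same construction with the Python 3 standard library (the deleted code goes through `six.moves` for Python 2 compatibility):

    from urllib.parse import urljoin, urlsplit

    base = "https://pypi.org/"
    print(urlsplit(base).netloc)    # 'pypi.org'
    print(urljoin(base, "simple"))  # 'https://pypi.org/simple'
    print(urljoin(base, "pypi"))    # 'https://pypi.org/pypi'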
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/link.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/link.py
deleted file mode 100644
index 2d50d17..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/link.py
+++ /dev/null
@@ -1,227 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-import os
-import posixpath
-import re
-
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-from pip._internal.utils.filetypes import WHEEL_EXTENSION
-from pip._internal.utils.misc import (
-    redact_auth_from_url,
-    split_auth_from_netloc,
-    splitext,
-)
-from pip._internal.utils.models import KeyBasedCompareMixin
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.urls import path_to_url, url_to_path
-
-if MYPY_CHECK_RUNNING:
-    from typing import Optional, Text, Tuple, Union
-    from pip._internal.collector import HTMLPage
-    from pip._internal.utils.hashes import Hashes
-
-
-class Link(KeyBasedCompareMixin):
-    """Represents a parsed link from a Package Index's simple URL
-    """
-
-    def __init__(
-        self,
-        url,                   # type: str
-        comes_from=None,       # type: Optional[Union[str, HTMLPage]]
-        requires_python=None,  # type: Optional[str]
-        yanked_reason=None,    # type: Optional[Text]
-    ):
-        # type: (...) -> None
-        """
-        :param url: url of the resource pointed to (href of the link)
-        :param comes_from: instance of HTMLPage where the link was found,
-            or string.
-        :param requires_python: String containing the `Requires-Python`
-            metadata field, specified in PEP 345. This may be specified by
-            a data-requires-python attribute in the HTML link tag, as
-            described in PEP 503.
-        :param yanked_reason: the reason the file has been yanked, if the
-            file has been yanked, or None if the file hasn't been yanked.
-            This is the value of the "data-yanked" attribute, if present, in
-            a simple repository HTML link. If the file has been yanked but
-            no reason was provided, this should be the empty string. See
-            PEP 592 for more information and the specification.
-        """
-
-        # url can be a UNC windows share
-        if url.startswith('\\\\'):
-            url = path_to_url(url)
-
-        self._parsed_url = urllib_parse.urlsplit(url)
-        # Store the url as a private attribute to prevent accidentally
-        # trying to set a new value.
-        self._url = url
-
-        self.comes_from = comes_from
-        self.requires_python = requires_python if requires_python else None
-        self.yanked_reason = yanked_reason
-
-        super(Link, self).__init__(key=url, defining_class=Link)
-
-    def __str__(self):
-        if self.requires_python:
-            rp = ' (requires-python:%s)' % self.requires_python
-        else:
-            rp = ''
-        if self.comes_from:
-            return '%s (from %s)%s' % (redact_auth_from_url(self._url),
-                                       self.comes_from, rp)
-        else:
-            return redact_auth_from_url(str(self._url))
-
-    def __repr__(self):
-        return '<Link %s>' % self
-
-    @property
-    def url(self):
-        # type: () -> str
-        return self._url
-
-    @property
-    def filename(self):
-        # type: () -> str
-        path = self.path.rstrip('/')
-        name = posixpath.basename(path)
-        if not name:
-            # Make sure we don't leak auth information if the netloc
-            # includes a username and password.
-            netloc, user_pass = split_auth_from_netloc(self.netloc)
-            return netloc
-
-        name = urllib_parse.unquote(name)
-        assert name, ('URL %r produced no filename' % self._url)
-        return name
-
-    @property
-    def file_path(self):
-        # type: () -> str
-        return url_to_path(self.url)
-
-    @property
-    def scheme(self):
-        # type: () -> str
-        return self._parsed_url.scheme
-
-    @property
-    def netloc(self):
-        # type: () -> str
-        """
-        This can contain auth information.
-        """
-        return self._parsed_url.netloc
-
-    @property
-    def path(self):
-        # type: () -> str
-        return urllib_parse.unquote(self._parsed_url.path)
-
-    def splitext(self):
-        # type: () -> Tuple[str, str]
-        return splitext(posixpath.basename(self.path.rstrip('/')))
-
-    @property
-    def ext(self):
-        # type: () -> str
-        return self.splitext()[1]
-
-    @property
-    def url_without_fragment(self):
-        # type: () -> str
-        scheme, netloc, path, query, fragment = self._parsed_url
-        return urllib_parse.urlunsplit((scheme, netloc, path, query, None))
-
-    _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')
-
-    @property
-    def egg_fragment(self):
-        # type: () -> Optional[str]
-        match = self._egg_fragment_re.search(self._url)
-        if not match:
-            return None
-        return match.group(1)
-
-    _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')
-
-    @property
-    def subdirectory_fragment(self):
-        # type: () -> Optional[str]
-        match = self._subdirectory_fragment_re.search(self._url)
-        if not match:
-            return None
-        return match.group(1)
-
-    _hash_re = re.compile(
-        r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
-    )
-
-    @property
-    def hash(self):
-        # type: () -> Optional[str]
-        match = self._hash_re.search(self._url)
-        if match:
-            return match.group(2)
-        return None
-
-    @property
-    def hash_name(self):
-        # type: () -> Optional[str]
-        match = self._hash_re.search(self._url)
-        if match:
-            return match.group(1)
-        return None
-
-    @property
-    def show_url(self):
-        # type: () -> Optional[str]
-        return posixpath.basename(self._url.split('#', 1)[0].split('?', 1)[0])
-
-    @property
-    def is_file(self):
-        # type: () -> bool
-        return self.scheme == 'file'
-
-    def is_existing_dir(self):
-        # type: () -> bool
-        return self.is_file and os.path.isdir(self.file_path)
-
-    @property
-    def is_wheel(self):
-        # type: () -> bool
-        return self.ext == WHEEL_EXTENSION
-
-    @property
-    def is_vcs(self):
-        # type: () -> bool
-        from pip._internal.vcs import vcs
-
-        return self.scheme in vcs.all_schemes
-
-    @property
-    def is_yanked(self):
-        # type: () -> bool
-        return self.yanked_reason is not None
-
-    @property
-    def has_hash(self):
-        return self.hash_name is not None
-
-    def is_hash_allowed(self, hashes):
-        # type: (Optional[Hashes]) -> bool
-        """
-        Return True if the link has a hash and it is allowed.
-        """
-        if hashes is None or not self.has_hash:
-            return False
-        # Assert non-None so mypy knows self.hash_name and self.hash are str.
-        assert self.hash_name is not None
-        assert self.hash is not None
-
-        return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash)
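`Link` above pulls the egg name, subdirectory and hash out of the URL fragment with the regexes shown. Applying those same patterns to a made-up URL:

    import re

    url = ("https://files.example.invalid/demo-1.0.tar.gz"
           "#sha256=deadbeef&egg=demo&subdirectory=src")

    egg = re.search(r"[#&]egg=([^&]*)", url)
    sub = re.search(r"[#&]subdirectory=([^&]*)", url)
    hsh = re.search(r"(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)", url)

    print(egg.group(1))                 # 'demo'
    print(sub.group(1))                 # 'src'
    print(hsh.group(1), hsh.group(2))   # 'sha256' 'deadbeef'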
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/search_scope.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/search_scope.py
deleted file mode 100644
index 6e38706..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/search_scope.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-import itertools
-import logging
-import os
-import posixpath
-
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-from pip._internal.models.index import PyPI
-from pip._internal.utils.compat import HAS_TLS
-from pip._internal.utils.misc import normalize_path, redact_auth_from_url
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import List
-
-
-logger = logging.getLogger(__name__)
-
-
-class SearchScope(object):
-
-    """
-    Encapsulates the locations that pip is configured to search.
-    """
-
-    @classmethod
-    def create(
-        cls,
-        find_links,  # type: List[str]
-        index_urls,  # type: List[str]
-    ):
-        # type: (...) -> SearchScope
-        """
-        Create a SearchScope object after normalizing the `find_links`.
-        """
-        # Build find_links. If an argument starts with ~, it may be
-        # a local file relative to a home directory. So try normalizing
-        # it and if it exists, use the normalized version.
-        # This is deliberately conservative - it might be fine just to
-        # blindly normalize anything starting with a ~...
-        built_find_links = []  # type: List[str]
-        for link in find_links:
-            if link.startswith('~'):
-                new_link = normalize_path(link)
-                if os.path.exists(new_link):
-                    link = new_link
-            built_find_links.append(link)
-
-        # If we don't have TLS enabled, then WARN if anyplace we're looking
-        # relies on TLS.
-        if not HAS_TLS:
-            for link in itertools.chain(index_urls, built_find_links):
-                parsed = urllib_parse.urlparse(link)
-                if parsed.scheme == 'https':
-                    logger.warning(
-                        'pip is configured with locations that require '
-                        'TLS/SSL, however the ssl module in Python is not '
-                        'available.'
-                    )
-                    break
-
-        return cls(
-            find_links=built_find_links,
-            index_urls=index_urls,
-        )
-
-    def __init__(
-        self,
-        find_links,  # type: List[str]
-        index_urls,  # type: List[str]
-    ):
-        # type: (...) -> None
-        self.find_links = find_links
-        self.index_urls = index_urls
-
-    def get_formatted_locations(self):
-        # type: () -> str
-        lines = []
-        if self.index_urls and self.index_urls != [PyPI.simple_url]:
-            lines.append(
-                'Looking in indexes: {}'.format(', '.join(
-                    redact_auth_from_url(url) for url in self.index_urls))
-            )
-        if self.find_links:
-            lines.append(
-                'Looking in links: {}'.format(', '.join(
-                    redact_auth_from_url(url) for url in self.find_links))
-            )
-        return '\n'.join(lines)
-
-    def get_index_urls_locations(self, project_name):
-        # type: (str) -> List[str]
-        """Returns the locations found via self.index_urls
-
-        Checks the url_name on the main (first in the list) index and
-        use this url_name to produce all locations
-        """
-
-        def mkurl_pypi_url(url):
-            loc = posixpath.join(
-                url,
-                urllib_parse.quote(canonicalize_name(project_name)))
-            # For maximum compatibility with easy_install, ensure the path
-            # ends in a trailing slash.  Although this isn't in the spec
-            # (and PyPI can handle it without the slash) some other index
-            # implementations might break if they relied on easy_install's
-            # behavior.
-            if not loc.endswith('/'):
-                loc = loc + '/'
-            return loc
-
-        return [mkurl_pypi_url(url) for url in self.index_urls]
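
For reference, the per-project index URL construction in the deleted get_index_urls_locations()/mkurl_pypi_url() above boils down to joining each index URL with the PEP 503-normalized project name and forcing a trailing slash. A minimal standalone sketch under those assumptions, with a hypothetical project_index_urls() helper and a re-implemented canonicalize() in place of pip's vendored packaging:

import posixpath
import re
from urllib.parse import quote

def canonicalize(name):
    # PEP 503 normalization, mirroring packaging.utils.canonicalize_name.
    return re.sub(r"[-_.]+", "-", name).lower()

def project_index_urls(project_name, index_urls):
    urls = []
    for url in index_urls:
        loc = posixpath.join(url, quote(canonicalize(project_name)))
        # Trailing slash for compatibility with easy_install-style index implementations.
        if not loc.endswith("/"):
            loc += "/"
        urls.append(loc)
    return urls

print(project_index_urls("Sample.Project", ["https://pypi.org/simple"]))
# ['https://pypi.org/simple/sample-project/']
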
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/selection_prefs.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/selection_prefs.py
deleted file mode 100644
index f58fdce..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/selection_prefs.py
+++ /dev/null
@@ -1,47 +0,0 @@
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import Optional
-    from pip._internal.models.format_control import FormatControl
-
-
-class SelectionPreferences(object):
-
-    """
-    Encapsulates the candidate selection preferences for downloading
-    and installing files.
-    """
-
-    # Don't include an allow_yanked default value to make sure each call
-    # site considers whether yanked releases are allowed. This also causes
-    # that decision to be made explicit in the calling code, which helps
-    # people when reading the code.
-    def __init__(
-        self,
-        allow_yanked,  # type: bool
-        allow_all_prereleases=False,  # type: bool
-        format_control=None,          # type: Optional[FormatControl]
-        prefer_binary=False,          # type: bool
-        ignore_requires_python=None,  # type: Optional[bool]
-    ):
-        # type: (...) -> None
-        """Create a SelectionPreferences object.
-
-        :param allow_yanked: Whether files marked as yanked (in the sense
-            of PEP 592) are permitted to be candidates for install.
-        :param format_control: A FormatControl object or None. Used to control
-            the selection of source packages / binary packages when consulting
-            the index and links.
-        :param prefer_binary: Whether to prefer an old, but valid, binary
-            dist over a new source dist.
-        :param ignore_requires_python: Whether to ignore incompatible
-            "Requires-Python" values in links. Defaults to False.
-        """
-        if ignore_requires_python is None:
-            ignore_requires_python = False
-
-        self.allow_yanked = allow_yanked
-        self.allow_all_prereleases = allow_all_prereleases
-        self.format_control = format_control
-        self.prefer_binary = prefer_binary
-        self.ignore_requires_python = ignore_requires_python
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/target_python.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/target_python.py
deleted file mode 100644
index a23b79c..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/models/target_python.py
+++ /dev/null
@@ -1,106 +0,0 @@
-import sys
-
-from pip._internal.pep425tags import get_supported, version_info_to_nodot
-from pip._internal.utils.misc import normalize_version_info
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import List, Optional, Tuple
-    from pip._internal.pep425tags import Pep425Tag
-
-
-class TargetPython(object):
-
-    """
-    Encapsulates the properties of a Python interpreter one is targeting
-    for a package install, download, etc.
-    """
-
-    def __init__(
-        self,
-        platform=None,  # type: Optional[str]
-        py_version_info=None,  # type: Optional[Tuple[int, ...]]
-        abi=None,  # type: Optional[str]
-        implementation=None,  # type: Optional[str]
-    ):
-        # type: (...) -> None
-        """
-        :param platform: A string or None. If None, searches for packages
-            that are supported by the current system. Otherwise, will find
-            packages that can be built on the platform passed in. These
-            packages will only be downloaded for distribution: they will
-            not be built locally.
-        :param py_version_info: An optional tuple of ints representing the
-            Python version information to use (e.g. `sys.version_info[:3]`).
-            This can have length 1, 2, or 3 when provided.
-        :param abi: A string or None. This is passed to pep425tags.py's
-            get_supported() function as is.
-        :param implementation: A string or None. This is passed to
-            pep425tags.py's get_supported() function as is.
-        """
-        # Store the given py_version_info for when we call get_supported().
-        self._given_py_version_info = py_version_info
-
-        if py_version_info is None:
-            py_version_info = sys.version_info[:3]
-        else:
-            py_version_info = normalize_version_info(py_version_info)
-
-        py_version = '.'.join(map(str, py_version_info[:2]))
-
-        self.abi = abi
-        self.implementation = implementation
-        self.platform = platform
-        self.py_version = py_version
-        self.py_version_info = py_version_info
-
-        # This is used to cache the return value of get_tags().
-        self._valid_tags = None  # type: Optional[List[Pep425Tag]]
-
-    def format_given(self):
-        # type: () -> str
-        """
-        Format the given, non-None attributes for display.
-        """
-        display_version = None
-        if self._given_py_version_info is not None:
-            display_version = '.'.join(
-                str(part) for part in self._given_py_version_info
-            )
-
-        key_values = [
-            ('platform', self.platform),
-            ('version_info', display_version),
-            ('abi', self.abi),
-            ('implementation', self.implementation),
-        ]
-        return ' '.join(
-            '{}={!r}'.format(key, value) for key, value in key_values
-            if value is not None
-        )
-
-    def get_tags(self):
-        # type: () -> List[Pep425Tag]
-        """
-        Return the supported PEP 425 tags to check wheel candidates against.
-
-        The tags are returned in order of preference (most preferred first).
-        """
-        if self._valid_tags is None:
-            # Pass versions=None if no py_version_info was given since
-            # versions=None uses special default logic.
-            py_version_info = self._given_py_version_info
-            if py_version_info is None:
-                versions = None
-            else:
-                versions = [version_info_to_nodot(py_version_info)]
-
-            tags = get_supported(
-                versions=versions,
-                platform=self.platform,
-                abi=self.abi,
-                impl=self.implementation,
-            )
-            self._valid_tags = tags
-
-        return self._valid_tags
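
TargetPython above defaults py_version_info to sys.version_info[:3] and, when formatting itself, only echoes back the attributes that were explicitly given. A small standalone sketch of that behaviour, using a hypothetical describe_target() function rather than pip's class:

import sys

def describe_target(platform=None, py_version_info=None, abi=None, implementation=None):
    given = py_version_info
    if py_version_info is None:
        py_version_info = sys.version_info[:3]
    py_version = ".".join(map(str, py_version_info[:2]))
    # Only explicitly given attributes are echoed back, as in format_given().
    display_version = None if given is None else ".".join(str(p) for p in given)
    key_values = [
        ("platform", platform),
        ("version_info", display_version),
        ("abi", abi),
        ("implementation", implementation),
    ]
    summary = " ".join(
        "{}={!r}".format(k, v) for k, v in key_values if v is not None
    )
    return py_version, summary

print(describe_target(py_version_info=(3, 7), implementation="cp"))
# ('3.7', "version_info='3.7' implementation='cp'")
print(describe_target())  # current interpreter, empty summary
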
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/network/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/network/__init__.py
deleted file mode 100644
index b51bde9..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/network/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-"""Contains purely network-related utilities.
-"""
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/network/auth.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/network/auth.py
deleted file mode 100644
index 1e1da54..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/network/auth.py
+++ /dev/null
@@ -1,298 +0,0 @@
-"""Network Authentication Helpers
-
-Contains interface (MultiDomainBasicAuth) and associated glue code for
-providing credentials in the context of network requests.
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-import logging
-
-from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
-from pip._vendor.requests.utils import get_netrc_auth
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-from pip._internal.utils.misc import (
-    ask,
-    ask_input,
-    ask_password,
-    remove_auth_from_url,
-    split_auth_netloc_from_url,
-)
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from optparse import Values
-    from typing import Dict, Optional, Tuple
-
-    from pip._internal.vcs.versioncontrol import AuthInfo
-
-    Credentials = Tuple[str, str, str]
-
-logger = logging.getLogger(__name__)
-
-try:
-    import keyring  # noqa
-except ImportError:
-    keyring = None
-except Exception as exc:
-    logger.warning(
-        "Keyring is skipped due to an exception: %s", str(exc),
-    )
-    keyring = None
-
-
-def get_keyring_auth(url, username):
-    """Return the tuple auth for a given url from keyring."""
-    if not url or not keyring:
-        return None
-
-    try:
-        try:
-            get_credential = keyring.get_credential
-        except AttributeError:
-            pass
-        else:
-            logger.debug("Getting credentials from keyring for %s", url)
-            cred = get_credential(url, username)
-            if cred is not None:
-                return cred.username, cred.password
-            return None
-
-        if username:
-            logger.debug("Getting password from keyring for %s", url)
-            password = keyring.get_password(url, username)
-            if password:
-                return username, password
-
-    except Exception as exc:
-        logger.warning(
-            "Keyring is skipped due to an exception: %s", str(exc),
-        )
-
-
-class MultiDomainBasicAuth(AuthBase):
-
-    def __init__(self, prompting=True, index_urls=None):
-        # type: (bool, Optional[Values]) -> None
-        self.prompting = prompting
-        self.index_urls = index_urls
-        self.passwords = {}  # type: Dict[str, AuthInfo]
-        # When the user is prompted to enter credentials and keyring is
-        # available, we will offer to save them. If the user accepts,
-        # this value is set to the credentials they entered. After the
-        # request authenticates, the caller should call
-        # ``save_credentials`` to save these.
-        self._credentials_to_save = None  # type: Optional[Credentials]
-
-    def _get_index_url(self, url):
-        """Return the original index URL matching the requested URL.
-
-        Cached or dynamically generated credentials may work against
-        the original index URL rather than just the netloc.
-
-        The provided url should have had its username and password
-        removed already. If the original index url had credentials then
-        they will be included in the return value.
-
-        Returns None if no matching index was found, or if --no-index
-        was specified by the user.
-        """
-        if not url or not self.index_urls:
-            return None
-
-        for u in self.index_urls:
-            prefix = remove_auth_from_url(u).rstrip("/") + "/"
-            if url.startswith(prefix):
-                return u
-
-    def _get_new_credentials(self, original_url, allow_netrc=True,
-                             allow_keyring=True):
-        """Find and return credentials for the specified URL."""
-        # Split the credentials and netloc from the url.
-        url, netloc, url_user_password = split_auth_netloc_from_url(
-            original_url,
-        )
-
-        # Start with the credentials embedded in the url
-        username, password = url_user_password
-        if username is not None and password is not None:
-            logger.debug("Found credentials in url for %s", netloc)
-            return url_user_password
-
-        # Find a matching index url for this request
-        index_url = self._get_index_url(url)
-        if index_url:
-            # Split the credentials from the url.
-            index_info = split_auth_netloc_from_url(index_url)
-            if index_info:
-                index_url, _, index_url_user_password = index_info
-                logger.debug("Found index url %s", index_url)
-
-        # If an index URL was found, try its embedded credentials
-        if index_url and index_url_user_password[0] is not None:
-            username, password = index_url_user_password
-            if username is not None and password is not None:
-                logger.debug("Found credentials in index url for %s", netloc)
-                return index_url_user_password
-
-        # Get creds from netrc if we still don't have them
-        if allow_netrc:
-            netrc_auth = get_netrc_auth(original_url)
-            if netrc_auth:
-                logger.debug("Found credentials in netrc for %s", netloc)
-                return netrc_auth
-
-        # If we don't have a password and keyring is available, use it.
-        if allow_keyring:
-            # The index url is more specific than the netloc, so try it first
-            kr_auth = (
-                get_keyring_auth(index_url, username) or
-                get_keyring_auth(netloc, username)
-            )
-            if kr_auth:
-                logger.debug("Found credentials in keyring for %s", netloc)
-                return kr_auth
-
-        return username, password
-
-    def _get_url_and_credentials(self, original_url):
-        """Return the credentials to use for the provided URL.
-
-        If allowed, netrc and keyring may be used to obtain the
-        correct credentials.
-
-        Returns (url_without_credentials, username, password). Note
-        that even if the original URL contains credentials, this
-        function may return a different username and password.
-        """
-        url, netloc, _ = split_auth_netloc_from_url(original_url)
-
-        # Use any stored credentials that we have for this netloc
-        username, password = self.passwords.get(netloc, (None, None))
-
-        if username is None and password is None:
-            # No stored credentials. Acquire new credentials without prompting
-            # the user. (e.g. from netrc, keyring, or the URL itself)
-            username, password = self._get_new_credentials(original_url)
-
-        if username is not None or password is not None:
-            # Convert the username and password if they're None, so that
-            # this netloc will show up as "cached" in the conditional above.
-            # Further, HTTPBasicAuth doesn't accept None, so it makes sense to
-            # cache the value that is going to be used.
-            username = username or ""
-            password = password or ""
-
-            # Store any acquired credentials.
-            self.passwords[netloc] = (username, password)
-
-        assert (
-            # Credentials were found
-            (username is not None and password is not None) or
-            # Credentials were not found
-            (username is None and password is None)
-        ), "Could not load credentials from url: {}".format(original_url)
-
-        return url, username, password
-
-    def __call__(self, req):
-        # Get credentials for this request
-        url, username, password = self._get_url_and_credentials(req.url)
-
-        # Set the url of the request to the url without any credentials
-        req.url = url
-
-        if username is not None and password is not None:
-            # Send the basic auth with this request
-            req = HTTPBasicAuth(username, password)(req)
-
-        # Attach a hook to handle 401 responses
-        req.register_hook("response", self.handle_401)
-
-        return req
-
-    # Factored out to allow for easy patching in tests
-    def _prompt_for_password(self, netloc):
-        username = ask_input("User for %s: " % netloc)
-        if not username:
-            return None, None
-        auth = get_keyring_auth(netloc, username)
-        if auth:
-            return auth[0], auth[1], False
-        password = ask_password("Password: ")
-        return username, password, True
-
-    # Factored out to allow for easy patching in tests
-    def _should_save_password_to_keyring(self):
-        if not keyring:
-            return False
-        return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"
-
-    def handle_401(self, resp, **kwargs):
-        # We only care about 401 responses, anything else we want to just
-        #   pass through the actual response
-        if resp.status_code != 401:
-            return resp
-
-        # We are not able to prompt the user so simply return the response
-        if not self.prompting:
-            return resp
-
-        parsed = urllib_parse.urlparse(resp.url)
-
-        # Prompt the user for a new username and password
-        username, password, save = self._prompt_for_password(parsed.netloc)
-
-        # Store the new username and password to use for future requests
-        self._credentials_to_save = None
-        if username is not None and password is not None:
-            self.passwords[parsed.netloc] = (username, password)
-
-            # Prompt to save the password to keyring
-            if save and self._should_save_password_to_keyring():
-                self._credentials_to_save = (parsed.netloc, username, password)
-
-        # Consume content and release the original connection to allow our new
-        #   request to reuse the same one.
-        resp.content
-        resp.raw.release_conn()
-
-        # Add our new username and password to the request
-        req = HTTPBasicAuth(username or "", password or "")(resp.request)
-        req.register_hook("response", self.warn_on_401)
-
-        # On successful request, save the credentials that were used to
-        # keyring. (Note that if the user responded "no" above, this member
-        # is not set and nothing will be saved.)
-        if self._credentials_to_save:
-            req.register_hook("response", self.save_credentials)
-
-        # Send our new request
-        new_resp = resp.connection.send(req, **kwargs)
-        new_resp.history.append(resp)
-
-        return new_resp
-
-    def warn_on_401(self, resp, **kwargs):
-        """Response callback to warn about incorrect credentials."""
-        if resp.status_code == 401:
-            logger.warning(
-                '401 Error, Credentials not correct for %s', resp.request.url,
-            )
-
-    def save_credentials(self, resp, **kwargs):
-        """Response callback to save credentials on success."""
-        assert keyring is not None, "should never reach here without keyring"
-        if not keyring:
-            return
-
-        creds = self._credentials_to_save
-        self._credentials_to_save = None
-        if creds and resp.status_code < 400:
-            try:
-                logger.info('Saving credentials to keyring')
-                keyring.set_password(*creds)
-            except Exception:
-                logger.exception('Failed to save credentials')
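
The credential lookup in _get_new_credentials() above tries, in order: auth embedded in the request URL, auth embedded in a matching configured index URL, ~/.netrc, and finally the system keyring; if nothing is found, the 401 handler prompts the user. A simplified standalone sketch of that precedence (hypothetical names, injectable netrc/keyring lookups instead of the real backends, and no index-URL prefix matching):

from urllib.parse import urlsplit

def creds_from_url(url):
    parts = urlsplit(url)
    return parts.username, parts.password

def find_credentials(url, index_urls=(), netrc_lookup=None, keyring_lookup=None):
    username, password = creds_from_url(url)
    if username is not None and password is not None:
        return username, password        # 1. auth embedded in the request URL
    for index_url in index_urls:         # 2. auth embedded in a configured index URL
        iu, ip = creds_from_url(index_url)
        if iu is not None:
            return iu, ip
    if netrc_lookup:                     # 3. ~/.netrc entry
        auth = netrc_lookup(url)
        if auth:
            return auth
    if keyring_lookup:                   # 4. system keyring
        auth = keyring_lookup(url, username)
        if auth:
            return auth
    return username, password            # may be (None, None): prompt on a 401

print(find_credentials("https://user:s3cret@pypi.example.org/simple/"))
# ('user', 's3cret')
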
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/network/cache.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/network/cache.py
deleted file mode 100644
index d23c0ff..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/network/cache.py
+++ /dev/null
@@ -1,75 +0,0 @@
-"""HTTP cache implementation.
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-import os
-from contextlib import contextmanager
-
-from pip._vendor.cachecontrol.cache import BaseCache
-from pip._vendor.cachecontrol.caches import FileCache
-
-from pip._internal.utils.filesystem import adjacent_tmp_file, replace
-from pip._internal.utils.misc import ensure_dir
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import Optional
-
-
-@contextmanager
-def suppressed_cache_errors():
-    """If we can't access the cache then we can just skip caching and process
-    requests as if caching wasn't enabled.
-    """
-    try:
-        yield
-    except (OSError, IOError):
-        pass
-
-
-class SafeFileCache(BaseCache):
-    """
-    A file based cache which is safe to use even when the target directory may
-    not be accessible or writable.
-    """
-
-    def __init__(self, directory):
-        # type: (str) -> None
-        assert directory is not None, "Cache directory must not be None."
-        super(SafeFileCache, self).__init__()
-        self.directory = directory
-
-    def _get_cache_path(self, name):
-        # type: (str) -> str
-        # From cachecontrol.caches.file_cache.FileCache._fn, brought into our
-        # class for backwards-compatibility and to avoid using a non-public
-        # method.
-        hashed = FileCache.encode(name)
-        parts = list(hashed[:5]) + [hashed]
-        return os.path.join(self.directory, *parts)
-
-    def get(self, key):
-        # type: (str) -> Optional[bytes]
-        path = self._get_cache_path(key)
-        with suppressed_cache_errors():
-            with open(path, 'rb') as f:
-                return f.read()
-
-    def set(self, key, value):
-        # type: (str, bytes) -> None
-        path = self._get_cache_path(key)
-        with suppressed_cache_errors():
-            ensure_dir(os.path.dirname(path))
-
-            with adjacent_tmp_file(path) as f:
-                f.write(value)
-
-            replace(f.name, path)
-
-    def delete(self, key):
-        # type: (str) -> None
-        path = self._get_cache_path(key)
-        with suppressed_cache_errors():
-            os.remove(path)
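
SafeFileCache above shards cache entries on disk: the key is hashed (cachecontrol's FileCache.encode produces a hex digest, SHA-224 at the time) and the first five characters become nested directory levels so no single directory grows too large. A standalone sketch of that path scheme under the SHA-224 assumption:

import hashlib
import os

def cache_path(directory, key):
    # Hash the key, then use one directory level per leading hex character.
    hashed = hashlib.sha224(key.encode("utf-8")).hexdigest()
    parts = list(hashed[:5]) + [hashed]
    return os.path.join(directory, *parts)

print(cache_path("/tmp/http-cache", "https://pypi.org/simple/sample-project/"))
# e.g. /tmp/http-cache/a/b/c/d/e/abcde...<full digest>
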
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/network/session.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/network/session.py
deleted file mode 100644
index ac6e262..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/network/session.py
+++ /dev/null
@@ -1,426 +0,0 @@
-"""PipSession and supporting code, containing all pip-specific
-network request configuration and behavior.
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-import email.utils
-import json
-import logging
-import mimetypes
-import os
-import platform
-import sys
-import warnings
-
-from pip._vendor import requests, six, urllib3
-from pip._vendor.cachecontrol import CacheControlAdapter
-from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
-from pip._vendor.requests.models import Response
-from pip._vendor.requests.structures import CaseInsensitiveDict
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-from pip._vendor.urllib3.exceptions import InsecureRequestWarning
-
-from pip import __version__
-from pip._internal.network.auth import MultiDomainBasicAuth
-from pip._internal.network.cache import SafeFileCache
-# Import ssl from compat so the initial import occurs in only one place.
-from pip._internal.utils.compat import HAS_TLS, ipaddress, ssl
-from pip._internal.utils.filesystem import check_path_owner
-from pip._internal.utils.glibc import libc_ver
-from pip._internal.utils.misc import (
-    build_url_from_netloc,
-    get_installed_version,
-    parse_netloc,
-)
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.urls import url_to_path
-
-if MYPY_CHECK_RUNNING:
-    from typing import (
-        Iterator, List, Optional, Tuple, Union,
-    )
-
-    from pip._internal.models.link import Link
-
-    SecureOrigin = Tuple[str, str, Optional[Union[int, str]]]
-
-
-logger = logging.getLogger(__name__)
-
-
-# Ignore warning raised when using --trusted-host.
-warnings.filterwarnings("ignore", category=InsecureRequestWarning)
-
-
-SECURE_ORIGINS = [
-    # protocol, hostname, port
-    # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
-    ("https", "*", "*"),
-    ("*", "localhost", "*"),
-    ("*", "127.0.0.0/8", "*"),
-    ("*", "::1/128", "*"),
-    ("file", "*", None),
-    # ssh is always secure.
-    ("ssh", "*", "*"),
-]  # type: List[SecureOrigin]
-
-
-# These are environment variables present when running under various
-# CI systems.  For each variable, some CI systems that use the variable
-# are indicated.  The collection was chosen so that for each of a number
-# of popular systems, at least one of the environment variables is used.
-# This list is used to provide some indication of and lower bound for
-# CI traffic to PyPI.  Thus, it is okay if the list is not comprehensive.
-# For more background, see: https://github.com/pypa/pip/issues/5499
-CI_ENVIRONMENT_VARIABLES = (
-    # Azure Pipelines
-    'BUILD_BUILDID',
-    # Jenkins
-    'BUILD_ID',
-    # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI
-    'CI',
-    # Explicit environment variable.
-    'PIP_IS_CI',
-)
-
-
-def looks_like_ci():
-    # type: () -> bool
-    """
-    Return whether it looks like pip is running under CI.
-    """
-    # We don't use the method of checking for a tty (e.g. using isatty())
-    # because some CI systems mimic a tty (e.g. Travis CI).  Thus that
-    # method doesn't provide definitive information in either direction.
-    return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES)
-
-
-def user_agent():
-    """
-    Return a string representing the user agent.
-    """
-    data = {
-        "installer": {"name": "pip", "version": __version__},
-        "python": platform.python_version(),
-        "implementation": {
-            "name": platform.python_implementation(),
-        },
-    }
-
-    if data["implementation"]["name"] == 'CPython':
-        data["implementation"]["version"] = platform.python_version()
-    elif data["implementation"]["name"] == 'PyPy':
-        if sys.pypy_version_info.releaselevel == 'final':
-            pypy_version_info = sys.pypy_version_info[:3]
-        else:
-            pypy_version_info = sys.pypy_version_info
-        data["implementation"]["version"] = ".".join(
-            [str(x) for x in pypy_version_info]
-        )
-    elif data["implementation"]["name"] == 'Jython':
-        # Complete Guess
-        data["implementation"]["version"] = platform.python_version()
-    elif data["implementation"]["name"] == 'IronPython':
-        # Complete Guess
-        data["implementation"]["version"] = platform.python_version()
-
-    if sys.platform.startswith("linux"):
-        from pip._vendor import distro
-        distro_infos = dict(filter(
-            lambda x: x[1],
-            zip(["name", "version", "id"], distro.linux_distribution()),
-        ))
-        libc = dict(filter(
-            lambda x: x[1],
-            zip(["lib", "version"], libc_ver()),
-        ))
-        if libc:
-            distro_infos["libc"] = libc
-        if distro_infos:
-            data["distro"] = distro_infos
-
-    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
-        data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}
-
-    if platform.system():
-        data.setdefault("system", {})["name"] = platform.system()
-
-    if platform.release():
-        data.setdefault("system", {})["release"] = platform.release()
-
-    if platform.machine():
-        data["cpu"] = platform.machine()
-
-    if HAS_TLS:
-        data["openssl_version"] = ssl.OPENSSL_VERSION
-
-    setuptools_version = get_installed_version("setuptools")
-    if setuptools_version is not None:
-        data["setuptools_version"] = setuptools_version
-
-    # Use None rather than False so as not to give the impression that
-    # pip knows it is not being run under CI.  Rather, it is a null or
-    # inconclusive result.  Also, we include some value rather than no
-    # value to make it easier to know that the check has been run.
-    data["ci"] = True if looks_like_ci() else None
-
-    user_data = os.environ.get("PIP_USER_AGENT_USER_DATA")
-    if user_data is not None:
-        data["user_data"] = user_data
-
-    return "{data[installer][name]}/{data[installer][version]} {json}".format(
-        data=data,
-        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
-    )
-
-
-class LocalFSAdapter(BaseAdapter):
-
-    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
-             proxies=None):
-        pathname = url_to_path(request.url)
-
-        resp = Response()
-        resp.status_code = 200
-        resp.url = request.url
-
-        try:
-            stats = os.stat(pathname)
-        except OSError as exc:
-            resp.status_code = 404
-            resp.raw = exc
-        else:
-            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
-            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
-            resp.headers = CaseInsensitiveDict({
-                "Content-Type": content_type,
-                "Content-Length": stats.st_size,
-                "Last-Modified": modified,
-            })
-
-            resp.raw = open(pathname, "rb")
-            resp.close = resp.raw.close
-
-        return resp
-
-    def close(self):
-        pass
-
-
-class InsecureHTTPAdapter(HTTPAdapter):
-
-    def cert_verify(self, conn, url, verify, cert):
-        conn.cert_reqs = 'CERT_NONE'
-        conn.ca_certs = None
-
-
-class PipSession(requests.Session):
-
-    timeout = None  # type: Optional[int]
-
-    def __init__(self, *args, **kwargs):
-        """
-        :param trusted_hosts: Domains not to emit warnings for when not using
-            HTTPS.
-        """
-        retries = kwargs.pop("retries", 0)
-        cache = kwargs.pop("cache", None)
-        trusted_hosts = kwargs.pop("trusted_hosts", [])  # type: List[str]
-        index_urls = kwargs.pop("index_urls", None)
-
-        super(PipSession, self).__init__(*args, **kwargs)
-
-        # Namespace the attribute with "pip_" just in case to prevent
-        # possible conflicts with the base class.
-        self.pip_trusted_origins = []  # type: List[Tuple[str, Optional[int]]]
-
-        # Attach our User Agent to the request
-        self.headers["User-Agent"] = user_agent()
-
-        # Attach our Authentication handler to the session
-        self.auth = MultiDomainBasicAuth(index_urls=index_urls)
-
-        # Create our urllib3.Retry instance which will allow us to customize
-        # how we handle retries.
-        retries = urllib3.Retry(
-            # Set the total number of retries that a particular request can
-            # have.
-            total=retries,
-
-            # A 503 error from PyPI typically means that the Fastly -> Origin
-            # connection got interrupted in some way. A 503 error in general
-            # is typically considered a transient error so we'll go ahead and
-            # retry it.
-            # A 500 may indicate transient error in Amazon S3
-            # A 520 or 527 - may indicate transient error in CloudFlare
-            status_forcelist=[500, 503, 520, 527],
-
-            # Add a small amount of back off between failed requests in
-            # order to prevent hammering the service.
-            backoff_factor=0.25,
-        )
-
-        # Check to ensure that the directory containing our cache directory
-        # is owned by the user current executing pip. If it does not exist
-        # we will check the parent directory until we find one that does exist.
-        if cache and not check_path_owner(cache):
-            logger.warning(
-                "The directory '%s' or its parent directory is not owned by "
-                "the current user and the cache has been disabled. Please "
-                "check the permissions and owner of that directory. If "
-                "executing pip with sudo, you may want sudo's -H flag.",
-                cache,
-            )
-            cache = None
-
-        # We want to _only_ cache responses on securely fetched origins. We do
-        # this because we can't validate the response of an insecurely fetched
-        # origin, and we don't want someone to be able to poison the cache and
-        # require manual eviction from the cache to fix it.
-        if cache:
-            secure_adapter = CacheControlAdapter(
-                cache=SafeFileCache(cache),
-                max_retries=retries,
-            )
-        else:
-            secure_adapter = HTTPAdapter(max_retries=retries)
-
-        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
-        # support caching (see above) so we'll use it for all http:// URLs as
-        # well as any https:// host that we've marked as ignoring TLS errors
-        # for.
-        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)
-        # Save this for later use in add_insecure_host().
-        self._insecure_adapter = insecure_adapter
-
-        self.mount("https://", secure_adapter)
-        self.mount("http://", insecure_adapter)
-
-        # Enable file:// urls
-        self.mount("file://", LocalFSAdapter())
-
-        for host in trusted_hosts:
-            self.add_trusted_host(host, suppress_logging=True)
-
-    def add_trusted_host(self, host, source=None, suppress_logging=False):
-        # type: (str, Optional[str], bool) -> None
-        """
-        :param host: It is okay to provide a host that has previously been
-            added.
-        :param source: An optional source string, for logging where the host
-            string came from.
-        """
-        if not suppress_logging:
-            msg = 'adding trusted host: {!r}'.format(host)
-            if source is not None:
-                msg += ' (from {})'.format(source)
-            logger.info(msg)
-
-        host_port = parse_netloc(host)
-        if host_port not in self.pip_trusted_origins:
-            self.pip_trusted_origins.append(host_port)
-
-        self.mount(build_url_from_netloc(host) + '/', self._insecure_adapter)
-        if not host_port[1]:
-            # Mount wildcard ports for the same host.
-            self.mount(
-                build_url_from_netloc(host) + ':',
-                self._insecure_adapter
-            )
-
-    def iter_secure_origins(self):
-        # type: () -> Iterator[SecureOrigin]
-        for secure_origin in SECURE_ORIGINS:
-            yield secure_origin
-        for host, port in self.pip_trusted_origins:
-            yield ('*', host, '*' if port is None else port)
-
-    def is_secure_origin(self, location):
-        # type: (Link) -> bool
-        # Determine if this url used a secure transport mechanism
-        parsed = urllib_parse.urlparse(str(location))
-        origin_protocol, origin_host, origin_port = (
-            parsed.scheme, parsed.hostname, parsed.port,
-        )
-
-        # The protocol to use to see if the protocol matches.
-        # Don't count the repository type as part of the protocol: in
-        # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
-        # the last scheme.)
-        origin_protocol = origin_protocol.rsplit('+', 1)[-1]
-
-        # Determine if our origin is a secure origin by looking through our
-        # hardcoded list of secure origins, as well as any additional ones
-        # configured on this PackageFinder instance.
-        for secure_origin in self.iter_secure_origins():
-            secure_protocol, secure_host, secure_port = secure_origin
-            if origin_protocol != secure_protocol and secure_protocol != "*":
-                continue
-
-            try:
-                # We need to do this decode dance to ensure that we have a
-                # unicode object, even on Python 2.x.
-                addr = ipaddress.ip_address(
-                    origin_host
-                    if (
-                        isinstance(origin_host, six.text_type) or
-                        origin_host is None
-                    )
-                    else origin_host.decode("utf8")
-                )
-                network = ipaddress.ip_network(
-                    secure_host
-                    if isinstance(secure_host, six.text_type)
-                    # setting secure_host to proper Union[bytes, str]
-                    # creates problems in other places
-                    else secure_host.decode("utf8")  # type: ignore
-                )
-            except ValueError:
-                # We don't have both a valid address or a valid network, so
-                # we'll check this origin against hostnames.
-                if (
-                    origin_host and
-                    origin_host.lower() != secure_host.lower() and
-                    secure_host != "*"
-                ):
-                    continue
-            else:
-                # We have a valid address and network, so see if the address
-                # is contained within the network.
-                if addr not in network:
-                    continue
-
-            # Check to see if the port matches.
-            if (
-                origin_port != secure_port and
-                secure_port != "*" and
-                secure_port is not None
-            ):
-                continue
-
-            # If we've gotten here, then this origin matches the current
-            # secure origin and we should return True
-            return True
-
-        # If we've gotten to this point, then the origin isn't secure and we
-        # will not accept it as a valid location to search. We will however
-        # log a warning that we are ignoring it.
-        logger.warning(
-            "The repository located at %s is not a trusted or secure host and "
-            "is being ignored. If this repository is available via HTTPS we "
-            "recommend you use HTTPS instead, otherwise you may silence "
-            "this warning and allow it anyway with '--trusted-host %s'.",
-            origin_host,
-            origin_host,
-        )
-
-        return False
-
-    def request(self, method, url, *args, **kwargs):
-        # Allow setting a default timeout on a session
-        kwargs.setdefault("timeout", self.timeout)
-
-        # Dispatch the actual request
-        return super(PipSession, self).request(method, url, *args, **kwargs)
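
is_secure_origin() above accepts https, ssh and file URLs, loopback hosts, and anything added via --trusted-host, and only the last scheme component counts (so git+ssh is treated as ssh). A simplified standalone sketch of that decision, without the wildcard port handling of the full implementation:

import ipaddress
from urllib.parse import urlsplit

LOOPBACK_NETS = (ipaddress.ip_network("127.0.0.0/8"), ipaddress.ip_network("::1/128"))

def is_secure_origin(url, trusted_hosts=()):
    parts = urlsplit(url)
    scheme = parts.scheme.rsplit("+", 1)[-1]   # "git+ssh" counts as "ssh"
    host = parts.hostname or ""
    if scheme in ("https", "ssh", "file"):
        return True
    if host == "localhost" or host in trusted_hosts:
        return True
    try:
        addr = ipaddress.ip_address(host)
    except ValueError:
        return False                           # not an IP literal, not trusted
    return any(addr in net for net in LOOPBACK_NETS)

print(is_secure_origin("http://pypi.example.org/simple/"))                        # False
print(is_secure_origin("http://127.0.0.1:8080/simple/"))                          # True
print(is_secure_origin("http://internal.example/simple/", {"internal.example"}))  # True
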
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/network/xmlrpc.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/network/xmlrpc.py
deleted file mode 100644
index 121edd9..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/network/xmlrpc.py
+++ /dev/null
@@ -1,44 +0,0 @@
-"""xmlrpclib.Transport implementation
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-import logging
-
-from pip._vendor import requests
-# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
-#       why we ignore the type on this import
-from pip._vendor.six.moves import xmlrpc_client  # type: ignore
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-logger = logging.getLogger(__name__)
-
-
-class PipXmlrpcTransport(xmlrpc_client.Transport):
-    """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
-    object.
-    """
-
-    def __init__(self, index_url, session, use_datetime=False):
-        xmlrpc_client.Transport.__init__(self, use_datetime)
-        index_parts = urllib_parse.urlparse(index_url)
-        self._scheme = index_parts.scheme
-        self._session = session
-
-    def request(self, host, handler, request_body, verbose=False):
-        parts = (self._scheme, host, handler, None, None, None)
-        url = urllib_parse.urlunparse(parts)
-        try:
-            headers = {'Content-Type': 'text/xml'}
-            response = self._session.post(url, data=request_body,
-                                          headers=headers, stream=True)
-            response.raise_for_status()
-            self.verbose = verbose
-            return self.parse_response(response.raw)
-        except requests.HTTPError as exc:
-            logger.critical(
-                "HTTP error %s while getting %s",
-                exc.response.status_code, url,
-            )
-            raise
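
PipXmlrpcTransport above routes xmlrpc.client calls through the shared PipSession so they pick up its retries, proxies and authentication. A standalone sketch of the same pattern against a plain requests.Session (the index URL is only illustrative, and PyPI's XML-RPC API is largely deprecated):

import xmlrpc.client
from urllib.parse import urlparse, urlunparse

import requests

class SessionTransport(xmlrpc.client.Transport):
    """Send XML-RPC requests through a requests.Session."""

    def __init__(self, index_url, session, use_datetime=False):
        super().__init__(use_datetime)
        self._scheme = urlparse(index_url).scheme
        self._session = session

    def request(self, host, handler, request_body, verbose=False):
        url = urlunparse((self._scheme, host, handler, None, None, None))
        response = self._session.post(
            url, data=request_body,
            headers={"Content-Type": "text/xml"}, stream=True,
        )
        response.raise_for_status()
        self.verbose = verbose
        return self.parse_response(response.raw)

index_url = "https://pypi.org/pypi"
proxy = xmlrpc.client.ServerProxy(
    index_url, transport=SessionTransport(index_url, requests.Session())
)
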
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/operations/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/operations/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/operations/check.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/operations/check.py
deleted file mode 100644
index 6bd1884..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/operations/check.py
+++ /dev/null
@@ -1,163 +0,0 @@
-"""Validation of dependencies of packages
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
-
-import logging
-from collections import namedtuple
-
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.pkg_resources import RequirementParseError
-
-from pip._internal.distributions import (
-    make_distribution_for_install_requirement,
-)
-from pip._internal.utils.misc import get_installed_distributions
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-logger = logging.getLogger(__name__)
-
-if MYPY_CHECK_RUNNING:
-    from pip._internal.req.req_install import InstallRequirement
-    from typing import (
-        Any, Callable, Dict, Optional, Set, Tuple, List
-    )
-
-    # Shorthands
-    PackageSet = Dict[str, 'PackageDetails']
-    Missing = Tuple[str, Any]
-    Conflicting = Tuple[str, str, Any]
-
-    MissingDict = Dict[str, List[Missing]]
-    ConflictingDict = Dict[str, List[Conflicting]]
-    CheckResult = Tuple[MissingDict, ConflictingDict]
-
-PackageDetails = namedtuple('PackageDetails', ['version', 'requires'])
-
-
-def create_package_set_from_installed(**kwargs):
-    # type: (**Any) -> Tuple[PackageSet, bool]
-    """Converts a list of distributions into a PackageSet.
-    """
-    # Default to using all packages installed on the system
-    if kwargs == {}:
-        kwargs = {"local_only": False, "skip": ()}
-
-    package_set = {}
-    problems = False
-    for dist in get_installed_distributions(**kwargs):
-        name = canonicalize_name(dist.project_name)
-        try:
-            package_set[name] = PackageDetails(dist.version, dist.requires())
-        except RequirementParseError as e:
-            # Don't crash on broken metadata
-            logging.warning("Error parsing requirements for %s: %s", name, e)
-            problems = True
-    return package_set, problems
-
-
-def check_package_set(package_set, should_ignore=None):
-    # type: (PackageSet, Optional[Callable[[str], bool]]) -> CheckResult
-    """Check if a package set is consistent
-
-    If should_ignore is passed, it should be a callable that takes a
-    package name and returns a boolean.
-    """
-    if should_ignore is None:
-        def should_ignore(name):
-            return False
-
-    missing = {}
-    conflicting = {}
-
-    for package_name in package_set:
-        # Info about dependencies of package_name
-        missing_deps = set()  # type: Set[Missing]
-        conflicting_deps = set()  # type: Set[Conflicting]
-
-        if should_ignore(package_name):
-            continue
-
-        for req in package_set[package_name].requires:
-            name = canonicalize_name(req.project_name)  # type: str
-
-            # Check if it's missing
-            if name not in package_set:
-                missed = True
-                if req.marker is not None:
-                    missed = req.marker.evaluate()
-                if missed:
-                    missing_deps.add((name, req))
-                continue
-
-            # Check if there's a conflict
-            version = package_set[name].version  # type: str
-            if not req.specifier.contains(version, prereleases=True):
-                conflicting_deps.add((name, version, req))
-
-        if missing_deps:
-            missing[package_name] = sorted(missing_deps, key=str)
-        if conflicting_deps:
-            conflicting[package_name] = sorted(conflicting_deps, key=str)
-
-    return missing, conflicting
-
-
-def check_install_conflicts(to_install):
-    # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult]
-    """For checking if the dependency graph would be consistent after \
-    installing given requirements
-    """
-    # Start from the current state
-    package_set, _ = create_package_set_from_installed()
-    # Install packages
-    would_be_installed = _simulate_installation_of(to_install, package_set)
-
-    # Only warn about directly-dependent packages; create a whitelist of them
-    whitelist = _create_whitelist(would_be_installed, package_set)
-
-    return (
-        package_set,
-        check_package_set(
-            package_set, should_ignore=lambda name: name not in whitelist
-        )
-    )
-
-
-def _simulate_installation_of(to_install, package_set):
-    # type: (List[InstallRequirement], PackageSet) -> Set[str]
-    """Computes the version of packages after installing to_install.
-    """
-
-    # Keep track of packages that were installed
-    installed = set()
-
-    # Modify it as installing requirement_set would (assuming no errors)
-    for inst_req in to_install:
-        abstract_dist = make_distribution_for_install_requirement(inst_req)
-        dist = abstract_dist.get_pkg_resources_distribution()
-
-        name = canonicalize_name(dist.key)
-        package_set[name] = PackageDetails(dist.version, dist.requires())
-
-        installed.add(name)
-
-    return installed
-
-
-def _create_whitelist(would_be_installed, package_set):
-    # type: (Set[str], PackageSet) -> Set[str]
-    packages_affected = set(would_be_installed)
-
-    for package_name in package_set:
-        if package_name in packages_affected:
-            continue
-
-        for req in package_set[package_name].requires:
-            if canonicalize_name(req.name) in packages_affected:
-                packages_affected.add(package_name)
-                break
-
-    return packages_affected
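
check_package_set() above classifies each installed package's requirements as either missing (the dependency is not installed) or conflicting (it is installed but its version falls outside the specifier), skipping requirements whose environment markers do not apply. A simplified standalone sketch over plain dicts that omits marker evaluation, using the packaging library (which pip vendors) and made-up package data:

from collections import namedtuple

from packaging.requirements import Requirement
from packaging.utils import canonicalize_name

PackageDetails = namedtuple("PackageDetails", ["version", "requires"])

def check(package_set):
    missing, conflicting = {}, {}
    for name, details in package_set.items():
        for req_str in details.requires:
            req = Requirement(req_str)
            dep = canonicalize_name(req.name)
            if dep not in package_set:
                missing.setdefault(name, []).append(req_str)
            elif not req.specifier.contains(package_set[dep].version, prereleases=True):
                conflicting.setdefault(name, []).append(req_str)
    return missing, conflicting

packages = {
    "example-app": PackageDetails("1.0", ["requests>=2.0", "missing-dep>=1.0"]),
    "requests": PackageDetails("2.22.0", []),
}
print(check(packages))
# ({'example-app': ['missing-dep>=1.0']}, {})
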
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/operations/freeze.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/operations/freeze.py
deleted file mode 100644
index bfdddf9..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/operations/freeze.py
+++ /dev/null
@@ -1,259 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import collections
-import logging
-import os
-import re
-
-from pip._vendor import six
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.pkg_resources import RequirementParseError
-
-from pip._internal.exceptions import BadCommand, InstallationError
-from pip._internal.req.constructors import (
-    install_req_from_editable,
-    install_req_from_line,
-)
-from pip._internal.req.req_file import COMMENT_RE
-from pip._internal.utils.misc import (
-    dist_is_editable,
-    get_installed_distributions,
-)
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import (
-        Iterator, Optional, List, Container, Set, Dict, Tuple, Iterable, Union
-    )
-    from pip._internal.cache import WheelCache
-    from pip._vendor.pkg_resources import (
-        Distribution, Requirement
-    )
-
-    RequirementInfo = Tuple[Optional[Union[str, Requirement]], bool, List[str]]
-
-
-logger = logging.getLogger(__name__)
-
-
-def freeze(
-    requirement=None,  # type: Optional[List[str]]
-    find_links=None,  # type: Optional[List[str]]
-    local_only=None,  # type: Optional[bool]
-    user_only=None,  # type: Optional[bool]
-    paths=None,  # type: Optional[List[str]]
-    skip_regex=None,  # type: Optional[str]
-    isolated=False,  # type: bool
-    wheel_cache=None,  # type: Optional[WheelCache]
-    exclude_editable=False,  # type: bool
-    skip=()  # type: Container[str]
-):
-    # type: (...) -> Iterator[str]
-    find_links = find_links or []
-    skip_match = None
-
-    if skip_regex:
-        skip_match = re.compile(skip_regex).search
-
-    for link in find_links:
-        yield '-f %s' % link
-    installations = {}  # type: Dict[str, FrozenRequirement]
-    for dist in get_installed_distributions(local_only=local_only,
-                                            skip=(),
-                                            user_only=user_only,
-                                            paths=paths):
-        try:
-            req = FrozenRequirement.from_dist(dist)
-        except RequirementParseError as exc:
-            # We include dist rather than dist.project_name because the
-            # dist string includes more information, like the version and
-            # location. We also include the exception message to aid
-            # troubleshooting.
-            logger.warning(
-                'Could not generate requirement for distribution %r: %s',
-                dist, exc
-            )
-            continue
-        if exclude_editable and req.editable:
-            continue
-        installations[req.name] = req
-
-    if requirement:
-        # the options that don't get turned into an InstallRequirement
-        # should only be emitted once, even if the same option is in multiple
-        # requirements files, so we need to keep track of what has been emitted
-        # so that we don't emit it again if it's seen again
-        emitted_options = set()  # type: Set[str]
-        # keep track of which files a requirement is in so that we can
-        # give an accurate warning if a requirement appears multiple times.
-        req_files = collections.defaultdict(list)  # type: Dict[str, List[str]]
-        for req_file_path in requirement:
-            with open(req_file_path) as req_file:
-                for line in req_file:
-                    if (not line.strip() or
-                            line.strip().startswith('#') or
-                            (skip_match and skip_match(line)) or
-                            line.startswith((
-                                '-r', '--requirement',
-                                '-Z', '--always-unzip',
-                                '-f', '--find-links',
-                                '-i', '--index-url',
-                                '--pre',
-                                '--trusted-host',
-                                '--process-dependency-links',
-                                '--extra-index-url'))):
-                        line = line.rstrip()
-                        if line not in emitted_options:
-                            emitted_options.add(line)
-                            yield line
-                        continue
-
-                    if line.startswith('-e') or line.startswith('--editable'):
-                        if line.startswith('-e'):
-                            line = line[2:].strip()
-                        else:
-                            line = line[len('--editable'):].strip().lstrip('=')
-                        line_req = install_req_from_editable(
-                            line,
-                            isolated=isolated,
-                            wheel_cache=wheel_cache,
-                        )
-                    else:
-                        line_req = install_req_from_line(
-                            COMMENT_RE.sub('', line).strip(),
-                            isolated=isolated,
-                            wheel_cache=wheel_cache,
-                        )
-
-                    if not line_req.name:
-                        logger.info(
-                            "Skipping line in requirement file [%s] because "
-                            "it's not clear what it would install: %s",
-                            req_file_path, line.strip(),
-                        )
-                        logger.info(
-                            "  (add #egg=PackageName to the URL to avoid"
-                            " this warning)"
-                        )
-                    elif line_req.name not in installations:
-                        # either it's not installed, or it is installed
-                        # but has been processed already
-                        if not req_files[line_req.name]:
-                            logger.warning(
-                                "Requirement file [%s] contains %s, but "
-                                "package %r is not installed",
-                                req_file_path,
-                                COMMENT_RE.sub('', line).strip(), line_req.name
-                            )
-                        else:
-                            req_files[line_req.name].append(req_file_path)
-                    else:
-                        yield str(installations[line_req.name]).rstrip()
-                        del installations[line_req.name]
-                        req_files[line_req.name].append(req_file_path)
-
-        # Warn about requirements that were included multiple times (in a
-        # single requirements file or in different requirements files).
-        for name, files in six.iteritems(req_files):
-            if len(files) > 1:
-                logger.warning("Requirement %s included multiple times [%s]",
-                               name, ', '.join(sorted(set(files))))
-
-        yield(
-            '## The following requirements were added by '
-            'pip freeze:'
-        )
-    for installation in sorted(
-            installations.values(), key=lambda x: x.name.lower()):
-        if canonicalize_name(installation.name) not in skip:
-            yield str(installation).rstrip()
-
-
-def get_requirement_info(dist):
-    # type: (Distribution) -> RequirementInfo
-    """
-    Compute and return values (req, editable, comments) for use in
-    FrozenRequirement.from_dist().
-    """
-    if not dist_is_editable(dist):
-        return (None, False, [])
-
-    location = os.path.normcase(os.path.abspath(dist.location))
-
-    from pip._internal.vcs import vcs, RemoteNotFoundError
-    vcs_backend = vcs.get_backend_for_dir(location)
-
-    if vcs_backend is None:
-        req = dist.as_requirement()
-        logger.debug(
-            'No VCS found for editable requirement "%s" in: %r', req,
-            location,
-        )
-        comments = [
-            '# Editable install with no version control ({})'.format(req)
-        ]
-        return (location, True, comments)
-
-    try:
-        req = vcs_backend.get_src_requirement(location, dist.project_name)
-    except RemoteNotFoundError:
-        req = dist.as_requirement()
-        comments = [
-            '# Editable {} install with no remote ({})'.format(
-                type(vcs_backend).__name__, req,
-            )
-        ]
-        return (location, True, comments)
-
-    except BadCommand:
-        logger.warning(
-            'cannot determine version of editable source in %s '
-            '(%s command not found in path)',
-            location,
-            vcs_backend.name,
-        )
-        return (None, True, [])
-
-    except InstallationError as exc:
-        logger.warning(
-            "Error when trying to get requirement for VCS system %s, "
-            "falling back to uneditable format", exc
-        )
-    else:
-        if req is not None:
-            return (req, True, [])
-
-    logger.warning(
-        'Could not determine repository location of %s', location
-    )
-    comments = ['## !! Could not determine repository location']
-
-    return (None, False, comments)
-
-
-class FrozenRequirement(object):
-    def __init__(self, name, req, editable, comments=()):
-        # type: (str, Union[str, Requirement], bool, Iterable[str]) -> None
-        self.name = name
-        self.req = req
-        self.editable = editable
-        self.comments = comments
-
-    @classmethod
-    def from_dist(cls, dist):
-        # type: (Distribution) -> FrozenRequirement
-        req, editable, comments = get_requirement_info(dist)
-        if req is None:
-            req = dist.as_requirement()
-
-        return cls(dist.project_name, req, editable, comments=comments)
-
-    def __str__(self):
-        req = self.req
-        if self.editable:
-            req = '-e %s' % req
-        return '\n'.join(list(self.comments) + [str(req)]) + '\n'
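The FrozenRequirement.__str__ above renders any comments first and prefixes the requirement with "-e " when the install is editable. A minimal standalone sketch of that rendering (hypothetical values, not pip's API):

    # Standalone sketch mirroring FrozenRequirement.__str__ above:
    # comments first, then the requirement, "-e " prefix for editables.
    def render_frozen(req, editable, comments=()):
        if editable:
            req = '-e %s' % req
        return '\n'.join(list(comments) + [str(req)]) + '\n'

    print(render_frozen('requests==2.22.0', editable=False), end='')
    print(render_frozen(
        'git+https://example.com/repo.git@abc123#egg=mypkg',
        editable=True,
        comments=['# Editable Git install with no remote (mypkg==0.1)'],
    ), end='')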
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/operations/generate_metadata.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/operations/generate_metadata.py
deleted file mode 100644
index 984748d..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/operations/generate_metadata.py
+++ /dev/null
@@ -1,136 +0,0 @@
-"""Metadata generation logic for source distributions.
-"""
-
-import logging
-import os
-
-from pip._internal.exceptions import InstallationError
-from pip._internal.utils.misc import ensure_dir
-from pip._internal.utils.setuptools_build import make_setuptools_shim_args
-from pip._internal.utils.subprocess import call_subprocess
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.vcs import vcs
-
-if MYPY_CHECK_RUNNING:
-    from typing import Callable, List
-    from pip._internal.req.req_install import InstallRequirement
-
-logger = logging.getLogger(__name__)
-
-
-def get_metadata_generator(install_req):
-    # type: (InstallRequirement) -> Callable[[InstallRequirement], str]
-    """Return a callable metadata generator for this InstallRequirement.
-
-    A metadata generator takes an InstallRequirement (install_req) as an input,
-    generates metadata via the appropriate process for that install_req and
-    returns the generated metadata directory.
-    """
-    if not install_req.use_pep517:
-        return _generate_metadata_legacy
-
-    return _generate_metadata
-
-
-def _find_egg_info(source_directory, is_editable):
-    # type: (str, bool) -> str
-    """Find an .egg-info in `source_directory`, based on `is_editable`.
-    """
-
-    def looks_like_virtual_env(path):
-        # type: (str) -> bool
-        return (
-            os.path.lexists(os.path.join(path, 'bin', 'python')) or
-            os.path.exists(os.path.join(path, 'Scripts', 'Python.exe'))
-        )
-
-    def locate_editable_egg_info(base):
-        # type: (str) -> List[str]
-        candidates = []  # type: List[str]
-        for root, dirs, files in os.walk(base):
-            for dir_ in vcs.dirnames:
-                if dir_ in dirs:
-                    dirs.remove(dir_)
-            # Iterate over a copy of ``dirs``, since mutating
-            # a list while iterating over it can cause trouble.
-            # (See https://github.com/pypa/pip/pull/462.)
-            for dir_ in list(dirs):
-                if looks_like_virtual_env(os.path.join(root, dir_)):
-                    dirs.remove(dir_)
-                # Also don't search through tests
-                elif dir_ == 'test' or dir_ == 'tests':
-                    dirs.remove(dir_)
-            candidates.extend(os.path.join(root, dir_) for dir_ in dirs)
-        return [f for f in candidates if f.endswith('.egg-info')]
-
-    def depth_of_directory(dir_):
-        # type: (str) -> int
-        return (
-            dir_.count(os.path.sep) +
-            (os.path.altsep and dir_.count(os.path.altsep) or 0)
-        )
-
-    base = source_directory
-    if is_editable:
-        filenames = locate_editable_egg_info(base)
-    else:
-        base = os.path.join(base, 'pip-egg-info')
-        filenames = os.listdir(base)
-
-    if not filenames:
-        raise InstallationError(
-            "Files/directories not found in %s" % base
-        )
-
-    # If we have more than one match, we pick the toplevel one.  This
-    # can easily be the case if there is a dist folder which contains
-    # an extracted tarball for testing purposes.
-    if len(filenames) > 1:
-        filenames.sort(key=depth_of_directory)
-
-    return os.path.join(base, filenames[0])
-
-
-def _generate_metadata_legacy(install_req):
-    # type: (InstallRequirement) -> str
-    req_details_str = install_req.name or "from {}".format(install_req.link)
-    logger.debug(
-        'Running setup.py (path:%s) egg_info for package %s',
-        install_req.setup_py_path, req_details_str,
-    )
-
-    # Compose arguments for subprocess call
-    base_cmd = make_setuptools_shim_args(install_req.setup_py_path)
-    if install_req.isolated:
-        base_cmd += ["--no-user-cfg"]
-
-    # For non-editable installs, don't put the .egg-info files at the root,
-    # to avoid confusion due to the source code being considered an installed
-    # egg.
-    egg_base_option = []  # type: List[str]
-    if not install_req.editable:
-        egg_info_dir = os.path.join(
-            install_req.unpacked_source_directory, 'pip-egg-info',
-        )
-        egg_base_option = ['--egg-base', egg_info_dir]
-
-        # setuptools complains if the target directory does not exist.
-        ensure_dir(egg_info_dir)
-
-    with install_req.build_env:
-        call_subprocess(
-            base_cmd + ["egg_info"] + egg_base_option,
-            cwd=install_req.unpacked_source_directory,
-            command_desc='python setup.py egg_info',
-        )
-
-    # Return the .egg-info directory.
-    return _find_egg_info(
-        install_req.unpacked_source_directory,
-        install_req.editable,
-    )
-
-
-def _generate_metadata(install_req):
-    # type: (InstallRequirement) -> str
-    return install_req.prepare_pep517_metadata()
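When _find_egg_info above finds more than one candidate, it sorts by path depth and keeps the shallowest match. A standalone sketch of that ordering with hypothetical paths:

    # Standalone sketch of the depth ordering used by _find_egg_info above:
    # the shallowest .egg-info path wins. Example paths are hypothetical.
    import os

    def depth_of_directory(dir_):
        return (
            dir_.count(os.path.sep) +
            (os.path.altsep and dir_.count(os.path.altsep) or 0)
        )

    candidates = [
        'dist/extracted/pkg/pkg.egg-info',
        'pkg.egg-info',
    ]
    candidates.sort(key=depth_of_directory)
    print(candidates[0])  # 'pkg.egg-info' -- the toplevel match is preferred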
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/operations/prepare.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/operations/prepare.py
deleted file mode 100644
index d093045..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/operations/prepare.py
+++ /dev/null
@@ -1,295 +0,0 @@
-"""Prepares a distribution for installation
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
-
-import logging
-import os
-
-from pip._vendor import requests
-
-from pip._internal.distributions import (
-    make_distribution_for_install_requirement,
-)
-from pip._internal.distributions.installed import InstalledDistribution
-from pip._internal.download import unpack_url
-from pip._internal.exceptions import (
-    DirectoryUrlHashUnsupported,
-    HashUnpinned,
-    InstallationError,
-    PreviousBuildDirError,
-    VcsHashUnsupported,
-)
-from pip._internal.utils.compat import expanduser
-from pip._internal.utils.hashes import MissingHashes
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.marker_files import write_delete_marker_file
-from pip._internal.utils.misc import display_path, normalize_path
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import Optional
-
-    from pip._internal.distributions import AbstractDistribution
-    from pip._internal.index import PackageFinder
-    from pip._internal.network.session import PipSession
-    from pip._internal.req.req_install import InstallRequirement
-    from pip._internal.req.req_tracker import RequirementTracker
-
-logger = logging.getLogger(__name__)
-
-
-def _get_prepared_distribution(req, req_tracker, finder, build_isolation):
-    """Prepare a distribution for installation.
-    """
-    abstract_dist = make_distribution_for_install_requirement(req)
-    with req_tracker.track(req):
-        abstract_dist.prepare_distribution_metadata(finder, build_isolation)
-    return abstract_dist
-
-
-class RequirementPreparer(object):
-    """Prepares a Requirement
-    """
-
-    def __init__(
-        self,
-        build_dir,  # type: str
-        download_dir,  # type: Optional[str]
-        src_dir,  # type: str
-        wheel_download_dir,  # type: Optional[str]
-        progress_bar,  # type: str
-        build_isolation,  # type: bool
-        req_tracker  # type: RequirementTracker
-    ):
-        # type: (...) -> None
-        super(RequirementPreparer, self).__init__()
-
-        self.src_dir = src_dir
-        self.build_dir = build_dir
-        self.req_tracker = req_tracker
-
-        # Where still-packed archives should be written to. If None, they are
-        # not saved, and are deleted immediately after unpacking.
-        if download_dir:
-            download_dir = expanduser(download_dir)
-        self.download_dir = download_dir
-
-        # Where still-packed .whl files should be written to. If None, they
-        # are written to download_dir instead. Kept separate from download_dir
-        # so that 'pip wheel' can keep only wheel archives.
-        if wheel_download_dir:
-            wheel_download_dir = normalize_path(wheel_download_dir)
-        self.wheel_download_dir = wheel_download_dir
-
-        # NOTE
-        # download_dir and wheel_download_dir overlap semantically and may
-        # be combined if we're willing to have non-wheel archives present in
-        # the wheelhouse output by 'pip wheel'.
-
-        self.progress_bar = progress_bar
-
-        # Is build isolation allowed?
-        self.build_isolation = build_isolation
-
-    @property
-    def _download_should_save(self):
-        # type: () -> bool
-        if not self.download_dir:
-            return False
-
-        if os.path.exists(self.download_dir):
-            return True
-
-        logger.critical('Could not find download directory')
-        raise InstallationError(
-            "Could not find or access download directory '%s'"
-            % display_path(self.download_dir))
-
-    def prepare_linked_requirement(
-        self,
-        req,  # type: InstallRequirement
-        session,  # type: PipSession
-        finder,  # type: PackageFinder
-        require_hashes,  # type: bool
-    ):
-        # type: (...) -> AbstractDistribution
-        """Prepare a requirement that would be obtained from req.link
-        """
-        assert req.link
-        link = req.link
-
-        # TODO: Breakup into smaller functions
-        if link.scheme == 'file':
-            path = link.file_path
-            logger.info('Processing %s', display_path(path))
-        else:
-            logger.info('Collecting %s', req.req or req)
-
-        with indent_log():
-            # @@ if filesystem packages are not marked editable in a req,
-            # a non-deterministic error occurs when the script attempts to
-            # unpack the build directory
-            req.ensure_has_source_dir(self.build_dir)
-            # If a checkout exists, it's unwise to keep going.  Version
-            # inconsistencies are logged later, but do not fail the
-            # installation.
-            # FIXME: this won't upgrade when there's an existing
-            # package unpacked in `req.source_dir`
-            if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
-                raise PreviousBuildDirError(
-                    "pip can't proceed with requirements '%s' due to a"
-                    " pre-existing build directory (%s). This is "
-                    "likely due to a previous installation that failed"
-                    ". pip is being responsible and not assuming it "
-                    "can delete this. Please delete it and try again."
-                    % (req, req.source_dir)
-                )
-
-            # Now that we have the real link, we can tell what kind of
-            # requirements we have and raise some more informative errors
-            # than otherwise. (For example, we can raise VcsHashUnsupported
-            # for a VCS URL rather than HashMissing.)
-            if require_hashes:
-                # We could check these first 2 conditions inside
-                # unpack_url and save repetition of conditions, but then
-                # we would report less-useful error messages for
-                # unhashable requirements, complaining that there's no
-                # hash provided.
-                if link.is_vcs:
-                    raise VcsHashUnsupported()
-                elif link.is_existing_dir():
-                    raise DirectoryUrlHashUnsupported()
-                if not req.original_link and not req.is_pinned:
-                    # Unpinned packages are asking for trouble when a new
-                    # version is uploaded. This isn't a security check, but
-                    # it saves users a surprising hash mismatch in the
-                    # future.
-                    #
-                    # file:/// URLs aren't pinnable, so don't complain
-                    # about them not being pinned.
-                    raise HashUnpinned()
-
-            hashes = req.hashes(trust_internet=not require_hashes)
-            if require_hashes and not hashes:
-                # Known-good hashes are missing for this requirement, so
-                # shim it with a facade object that will provoke hash
-                # computation and then raise a HashMissing exception
-                # showing the user what the hash should be.
-                hashes = MissingHashes()
-
-            download_dir = self.download_dir
-            if link.is_wheel and self.wheel_download_dir:
-                # when doing 'pip wheel` we download wheels to a
-                # dedicated dir.
-                download_dir = self.wheel_download_dir
-
-            try:
-                unpack_url(
-                    link, req.source_dir, download_dir,
-                    session=session, hashes=hashes,
-                    progress_bar=self.progress_bar
-                )
-            except requests.HTTPError as exc:
-                logger.critical(
-                    'Could not install requirement %s because of error %s',
-                    req,
-                    exc,
-                )
-                raise InstallationError(
-                    'Could not install requirement %s because of HTTP '
-                    'error %s for URL %s' %
-                    (req, exc, link)
-                )
-
-            if link.is_wheel:
-                if download_dir:
-                    # When downloading, we only unpack wheels to get
-                    # metadata.
-                    autodelete_unpacked = True
-                else:
-                    # When installing a wheel, we use the unpacked
-                    # wheel.
-                    autodelete_unpacked = False
-            else:
-                # We always delete unpacked sdists after pip runs.
-                autodelete_unpacked = True
-            if autodelete_unpacked:
-                write_delete_marker_file(req.source_dir)
-
-            abstract_dist = _get_prepared_distribution(
-                req, self.req_tracker, finder, self.build_isolation,
-            )
-
-            if self._download_should_save:
-                # Make a .zip of the source_dir we already created.
-                if link.is_vcs:
-                    req.archive(self.download_dir)
-        return abstract_dist
-
-    def prepare_editable_requirement(
-        self,
-        req,  # type: InstallRequirement
-        require_hashes,  # type: bool
-        use_user_site,  # type: bool
-        finder  # type: PackageFinder
-    ):
-        # type: (...) -> AbstractDistribution
-        """Prepare an editable requirement
-        """
-        assert req.editable, "cannot prepare a non-editable req as editable"
-
-        logger.info('Obtaining %s', req)
-
-        with indent_log():
-            if require_hashes:
-                raise InstallationError(
-                    'The editable requirement %s cannot be installed when '
-                    'requiring hashes, because there is no single file to '
-                    'hash.' % req
-                )
-            req.ensure_has_source_dir(self.src_dir)
-            req.update_editable(not self._download_should_save)
-
-            abstract_dist = _get_prepared_distribution(
-                req, self.req_tracker, finder, self.build_isolation,
-            )
-
-            if self._download_should_save:
-                req.archive(self.download_dir)
-            req.check_if_exists(use_user_site)
-
-        return abstract_dist
-
-    def prepare_installed_requirement(
-        self,
-        req,  # type: InstallRequirement
-        require_hashes,  # type: bool
-        skip_reason  # type: str
-    ):
-        # type: (...) -> AbstractDistribution
-        """Prepare an already-installed requirement
-        """
-        assert req.satisfied_by, "req should have been satisfied but isn't"
-        assert skip_reason is not None, (
-            "did not get skip reason skipped but req.satisfied_by "
-            "is set to %r" % (req.satisfied_by,)
-        )
-        logger.info(
-            'Requirement %s: %s (%s)',
-            skip_reason, req, req.satisfied_by.version
-        )
-        with indent_log():
-            if require_hashes:
-                logger.debug(
-                    'Since it is already installed, we are trusting this '
-                    'package without checking its hash. To ensure a '
-                    'completely repeatable environment, install into an '
-                    'empty virtualenv.'
-                )
-            abstract_dist = InstalledDistribution(req)
-
-        return abstract_dist
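The require_hashes branch in prepare_linked_requirement above rejects VCS links and local directories outright and then insists the requirement be pinned. A standalone sketch of that decision order (the flags and exception are simplified stand-ins, not pip's real classes):

    # Standalone sketch of the hash-checking preconditions checked above.
    class HashError(Exception):
        pass

    def check_hash_preconditions(is_vcs, is_existing_dir, is_pinned,
                                 has_original_link):
        """Mirror the order of the require_hashes checks: VCS links and local
        directories cannot be hashed at all, and anything else must be pinned."""
        if is_vcs:
            raise HashError("Can't verify hashes for a VCS requirement")
        if is_existing_dir:
            raise HashError("Can't verify hashes for a local directory")
        if not has_original_link and not is_pinned:
            raise HashError("Hash-checking requires a pinned (==) version")

    # A pinned, non-VCS requirement passes the preconditions:
    check_hash_preconditions(
        is_vcs=False, is_existing_dir=False,
        is_pinned=True, has_original_link=False,
    )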
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/pep425tags.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/pep425tags.py
deleted file mode 100644
index 042ba34..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/pep425tags.py
+++ /dev/null
@@ -1,449 +0,0 @@
-"""Generate and work with PEP 425 Compatibility Tags."""
-from __future__ import absolute_import
-
-import distutils.util
-import logging
-import platform
-import re
-import sys
-import sysconfig
-import warnings
-from collections import OrderedDict
-
-import pip._internal.utils.glibc
-from pip._internal.utils.compat import get_extension_suffixes
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import (
-        Tuple, Callable, List, Optional, Union, Dict, Set
-    )
-
-    Pep425Tag = Tuple[str, str, str]
-
-logger = logging.getLogger(__name__)
-
-_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')
-
-
-def get_config_var(var):
-    # type: (str) -> Optional[str]
-    try:
-        return sysconfig.get_config_var(var)
-    except IOError as e:  # Issue #1074
-        warnings.warn("{}".format(e), RuntimeWarning)
-        return None
-
-
-def get_abbr_impl():
-    # type: () -> str
-    """Return abbreviated implementation name."""
-    if hasattr(sys, 'pypy_version_info'):
-        pyimpl = 'pp'
-    elif sys.platform.startswith('java'):
-        pyimpl = 'jy'
-    elif sys.platform == 'cli':
-        pyimpl = 'ip'
-    else:
-        pyimpl = 'cp'
-    return pyimpl
-
-
-def version_info_to_nodot(version_info):
-    # type: (Tuple[int, ...]) -> str
-    # Only use up to the first two numbers.
-    return ''.join(map(str, version_info[:2]))
-
-
-def get_impl_ver():
-    # type: () -> str
-    """Return implementation version."""
-    impl_ver = get_config_var("py_version_nodot")
-    if not impl_ver or get_abbr_impl() == 'pp':
-        impl_ver = ''.join(map(str, get_impl_version_info()))
-    return impl_ver
-
-
-def get_impl_version_info():
-    # type: () -> Tuple[int, ...]
-    """Return sys.version_info-like tuple for use in decrementing the minor
-    version."""
-    if get_abbr_impl() == 'pp':
-        # as per https://github.com/pypa/pip/issues/2882
-        # attrs exist only on pypy
-        return (sys.version_info[0],
-                sys.pypy_version_info.major,  # type: ignore
-                sys.pypy_version_info.minor)  # type: ignore
-    else:
-        return sys.version_info[0], sys.version_info[1]
-
-
-def get_impl_tag():
-    # type: () -> str
-    """
-    Returns the Tag for this specific implementation.
-    """
-    return "{}{}".format(get_abbr_impl(), get_impl_ver())
-
-
-def get_flag(var, fallback, expected=True, warn=True):
-    # type: (str, Callable[..., bool], Union[bool, int], bool) -> bool
-    """Use a fallback method for determining SOABI flags if the needed config
-    var is unset or unavailable."""
-    val = get_config_var(var)
-    if val is None:
-        if warn:
-            logger.debug("Config variable '%s' is unset, Python ABI tag may "
-                         "be incorrect", var)
-        return fallback()
-    return val == expected
-
-
-def get_abi_tag():
-    # type: () -> Optional[str]
-    """Return the ABI tag based on SOABI (if available) or emulate SOABI
-    (CPython 2, PyPy)."""
-    soabi = get_config_var('SOABI')
-    impl = get_abbr_impl()
-    abi = None  # type: Optional[str]
-
-    if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
-        d = ''
-        m = ''
-        u = ''
-        is_cpython = (impl == 'cp')
-        if get_flag(
-                'Py_DEBUG', lambda: hasattr(sys, 'gettotalrefcount'),
-                warn=is_cpython):
-            d = 'd'
-        if sys.version_info < (3, 8) and get_flag(
-                'WITH_PYMALLOC', lambda: is_cpython, warn=is_cpython):
-            m = 'm'
-        if sys.version_info < (3, 3) and get_flag(
-                'Py_UNICODE_SIZE', lambda: sys.maxunicode == 0x10ffff,
-                expected=4, warn=is_cpython):
-            u = 'u'
-        abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
-    elif soabi and soabi.startswith('cpython-'):
-        abi = 'cp' + soabi.split('-')[1]
-    elif soabi:
-        abi = soabi.replace('.', '_').replace('-', '_')
-
-    return abi
-
-
-def _is_running_32bit():
-    # type: () -> bool
-    return sys.maxsize == 2147483647
-
-
-def get_platform():
-    # type: () -> str
-    """Return our platform name 'win32', 'linux_x86_64'"""
-    if sys.platform == 'darwin':
-        # distutils.util.get_platform() returns the release based on the value
-        # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may
-        # be significantly older than the user's current machine.
-        release, _, machine = platform.mac_ver()
-        split_ver = release.split('.')
-
-        if machine == "x86_64" and _is_running_32bit():
-            machine = "i386"
-        elif machine == "ppc64" and _is_running_32bit():
-            machine = "ppc"
-
-        return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine)
-
-    # XXX remove distutils dependency
-    result = distutils.util.get_platform().replace('.', '_').replace('-', '_')
-    if result == "linux_x86_64" and _is_running_32bit():
-        # 32 bit Python program (running on a 64 bit Linux): pip should only
-        # install and run 32 bit compiled extensions in that case.
-        result = "linux_i686"
-
-    return result
-
-
-def is_linux_armhf():
-    # type: () -> bool
-    if get_platform() != "linux_armv7l":
-        return False
-    # hard-float ABI can be detected from the ELF header of the running
-    # process
-    try:
-        with open(sys.executable, 'rb') as f:
-            elf_header_raw = f.read(40)  # read 40 first bytes of ELF header
-    except (IOError, OSError, TypeError):
-        return False
-    if elf_header_raw is None or len(elf_header_raw) < 40:
-        return False
-    if isinstance(elf_header_raw, str):
-        elf_header = [ord(c) for c in elf_header_raw]
-    else:
-        elf_header = [b for b in elf_header_raw]
-    result = elf_header[0:4] == [0x7f, 0x45, 0x4c, 0x46]  # ELF magic number
-    result &= elf_header[4:5] == [1]  # 32-bit ELF
-    result &= elf_header[5:6] == [1]  # little-endian
-    result &= elf_header[18:20] == [0x28, 0]  # ARM machine
-    result &= elf_header[39:40] == [5]  # ARM EABIv5
-    result &= (elf_header[37:38][0] & 4) == 4  # EF_ARM_ABI_FLOAT_HARD
-    return result
-
-
-def is_manylinux1_compatible():
-    # type: () -> bool
-    # Only Linux, and only x86-64 / i686
-    if get_platform() not in {"linux_x86_64", "linux_i686"}:
-        return False
-
-    # Check for presence of _manylinux module
-    try:
-        import _manylinux
-        return bool(_manylinux.manylinux1_compatible)
-    except (ImportError, AttributeError):
-        # Fall through to heuristic check below
-        pass
-
-    # Check glibc version. CentOS 5 uses glibc 2.5.
-    return pip._internal.utils.glibc.have_compatible_glibc(2, 5)
-
-
-def is_manylinux2010_compatible():
-    # type: () -> bool
-    # Only Linux, and only x86-64 / i686
-    if get_platform() not in {"linux_x86_64", "linux_i686"}:
-        return False
-
-    # Check for presence of _manylinux module
-    try:
-        import _manylinux
-        return bool(_manylinux.manylinux2010_compatible)
-    except (ImportError, AttributeError):
-        # Fall through to heuristic check below
-        pass
-
-    # Check glibc version. CentOS 6 uses glibc 2.12.
-    return pip._internal.utils.glibc.have_compatible_glibc(2, 12)
-
-
-def is_manylinux2014_compatible():
-    # type: () -> bool
-    # Only Linux, and only supported architectures
-    platform = get_platform()
-    if platform not in {"linux_x86_64", "linux_i686", "linux_aarch64",
-                        "linux_armv7l", "linux_ppc64", "linux_ppc64le",
-                        "linux_s390x"}:
-        return False
-
-    # Check for the hard-float ABI when running on linux_armv7l so that we
-    # do not install a hard-float ABI wheel into a soft-float ABI environment.
-    if platform == "linux_armv7l" and not is_linux_armhf():
-        return False
-
-    # Check for presence of _manylinux module
-    try:
-        import _manylinux
-        return bool(_manylinux.manylinux2014_compatible)
-    except (ImportError, AttributeError):
-        # Fall through to heuristic check below
-        pass
-
-    # Check glibc version. CentOS 7 uses glibc 2.17.
-    return pip._internal.utils.glibc.have_compatible_glibc(2, 17)
-
-
-def get_darwin_arches(major, minor, machine):
-    # type: (int, int, str) -> List[str]
-    """Return a list of supported arches (including group arches) for
-    the given major, minor and machine architecture of a macOS machine.
-    """
-    arches = []
-
-    def _supports_arch(major, minor, arch):
-        # type: (int, int, str) -> bool
-        # Looking at the application support for macOS versions in the chart
-        # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears
-        # our timeline looks roughly like:
-        #
-        # 10.0 - Introduces ppc support.
-        # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64
-        #        and x86_64 support is CLI only, and cannot be used for GUI
-        #        applications.
-        # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications.
-        # 10.6 - Drops support for ppc64
-        # 10.7 - Drops support for ppc
-        #
-        # Given that we do not know if we're installing a CLI or a GUI
-        # application, we must be conservative and assume it might be a GUI
-        # application and behave as if ppc64 and x86_64 support did not occur
-        # until 10.5.
-        #
-        # Note: The above information is taken from the "Application support"
-        #       column in the chart, not the "Processor support" column, since
-        #       I believe we care about what instruction sets an application
-        #       can use, not which processors the OS supports.
-        if arch == 'ppc':
-            return (major, minor) <= (10, 5)
-        if arch == 'ppc64':
-            return (major, minor) == (10, 5)
-        if arch == 'i386':
-            return (major, minor) >= (10, 4)
-        if arch == 'x86_64':
-            return (major, minor) >= (10, 5)
-        if arch in groups:
-            for garch in groups[arch]:
-                if _supports_arch(major, minor, garch):
-                    return True
-        return False
-
-    groups = OrderedDict([
-        ("fat", ("i386", "ppc")),
-        ("intel", ("x86_64", "i386")),
-        ("fat64", ("x86_64", "ppc64")),
-        ("fat32", ("x86_64", "i386", "ppc")),
-    ])  # type: Dict[str, Tuple[str, ...]]
-
-    if _supports_arch(major, minor, machine):
-        arches.append(machine)
-
-    for garch in groups:
-        if machine in groups[garch] and _supports_arch(major, minor, garch):
-            arches.append(garch)
-
-    arches.append('universal')
-
-    return arches
-
-
-def get_all_minor_versions_as_strings(version_info):
-    # type: (Tuple[int, ...]) -> List[str]
-    versions = []
-    major = version_info[:-1]
-    # Support all previous minor Python versions.
-    for minor in range(version_info[-1], -1, -1):
-        versions.append(''.join(map(str, major + (minor,))))
-    return versions
-
-
-def get_supported(
-    versions=None,  # type: Optional[List[str]]
-    noarch=False,  # type: bool
-    platform=None,  # type: Optional[str]
-    impl=None,  # type: Optional[str]
-    abi=None  # type: Optional[str]
-):
-    # type: (...) -> List[Pep425Tag]
-    """Return a list of supported tags for each version specified in
-    `versions`.
-
-    :param versions: a list of string versions, of the form ["33", "32"],
-        or None. The first version will be assumed to support our ABI.
-    :param platform: specify the exact platform you want valid
-        tags for, or None. If None, use the local system platform.
-    :param impl: specify the exact implementation you want valid
-        tags for, or None. If None, use the local interpreter impl.
-    :param abi: specify the exact abi you want valid
-        tags for, or None. If None, use the local interpreter abi.
-    """
-    supported = []
-
-    # Versions must be given with respect to the preference
-    if versions is None:
-        version_info = get_impl_version_info()
-        versions = get_all_minor_versions_as_strings(version_info)
-
-    impl = impl or get_abbr_impl()
-
-    abis = []  # type: List[str]
-
-    abi = abi or get_abi_tag()
-    if abi:
-        abis[0:0] = [abi]
-
-    abi3s = set()  # type: Set[str]
-    for suffix in get_extension_suffixes():
-        if suffix.startswith('.abi'):
-            abi3s.add(suffix.split('.', 2)[1])
-
-    abis.extend(sorted(list(abi3s)))
-
-    abis.append('none')
-
-    if not noarch:
-        arch = platform or get_platform()
-        arch_prefix, arch_sep, arch_suffix = arch.partition('_')
-        if arch.startswith('macosx'):
-            # support macosx-10.6-intel on macosx-10.9-x86_64
-            match = _osx_arch_pat.match(arch)
-            if match:
-                name, major, minor, actual_arch = match.groups()
-                tpl = '{}_{}_%i_%s'.format(name, major)
-                arches = []
-                for m in reversed(range(int(minor) + 1)):
-                    for a in get_darwin_arches(int(major), m, actual_arch):
-                        arches.append(tpl % (m, a))
-            else:
-                # arch pattern didn't match (?!)
-                arches = [arch]
-        elif arch_prefix == 'manylinux2014':
-            arches = [arch]
-            # manylinux1/manylinux2010 wheels run on most manylinux2014 systems
-            # with the exception of wheels depending on ncurses. PEP 599 states
-            # manylinux1/manylinux2010 wheels should be considered
-            # manylinux2014 wheels:
-            # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels
-            if arch_suffix in {'i686', 'x86_64'}:
-                arches.append('manylinux2010' + arch_sep + arch_suffix)
-                arches.append('manylinux1' + arch_sep + arch_suffix)
-        elif arch_prefix == 'manylinux2010':
-            # manylinux1 wheels run on most manylinux2010 systems with the
-            # exception of wheels depending on ncurses. PEP 571 states
-            # manylinux1 wheels should be considered manylinux2010 wheels:
-            # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels
-            arches = [arch, 'manylinux1' + arch_sep + arch_suffix]
-        elif platform is None:
-            arches = []
-            if is_manylinux2014_compatible():
-                arches.append('manylinux2014' + arch_sep + arch_suffix)
-            if is_manylinux2010_compatible():
-                arches.append('manylinux2010' + arch_sep + arch_suffix)
-            if is_manylinux1_compatible():
-                arches.append('manylinux1' + arch_sep + arch_suffix)
-            arches.append(arch)
-        else:
-            arches = [arch]
-
-        # Current version, current API (built specifically for our Python):
-        for abi in abis:
-            for arch in arches:
-                supported.append(('%s%s' % (impl, versions[0]), abi, arch))
-
-        # abi3 modules compatible with older version of Python
-        for version in versions[1:]:
-            # abi3 was introduced in Python 3.2
-            if version in {'31', '30'}:
-                break
-            for abi in abi3s:   # empty set if not Python 3
-                for arch in arches:
-                    supported.append(("%s%s" % (impl, version), abi, arch))
-
-        # Has binaries, does not use the Python API:
-        for arch in arches:
-            supported.append(('py%s' % (versions[0][0]), 'none', arch))
-
-    # No abi / arch, but requires our implementation:
-    supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
-    # Tagged specifically as being cross-version compatible
-    # (with just the major version specified)
-    supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))
-
-    # No abi / arch, generic Python
-    for i, version in enumerate(versions):
-        supported.append(('py%s' % (version,), 'none', 'any'))
-        if i == 0:
-            supported.append(('py%s' % (version[0]), 'none', 'any'))
-
-    return supported
-
-
-implementation_tag = get_impl_tag()
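get_supported above builds (implementation+version, ABI, platform) triples from most specific to most generic. A toy sketch of that composition for hypothetical inputs (only a subset of the fallbacks, not pip's full ordering):

    # Standalone sketch of how PEP 425 tag triples are composed, in the spirit
    # of get_supported() above: specific interpreter/ABI/platform tags first,
    # then progressively more generic fallbacks. Inputs are hypothetical.
    def toy_supported(impl='cp', version='37', abi='cp37m',
                      arches=('manylinux1_x86_64', 'linux_x86_64')):
        supported = []
        # Built specifically for this interpreter and platform:
        for arch in arches:
            supported.append(('%s%s' % (impl, version), abi, arch))
        # Has binaries but does not use the Python ABI:
        for arch in arches:
            supported.append(('py%s' % version[0], 'none', arch))
        # Pure-Python fallbacks:
        supported.append(('%s%s' % (impl, version), 'none', 'any'))
        supported.append(('py%s' % version[0], 'none', 'any'))
        return supported

    for tag in toy_supported():
        print('-'.join(tag))  # e.g. cp37-cp37m-manylinux1_x86_64 ... py3-none-any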
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/pyproject.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/pyproject.py
deleted file mode 100644
index 98c20f7..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/pyproject.py
+++ /dev/null
@@ -1,171 +0,0 @@
-from __future__ import absolute_import
-
-import io
-import os
-import sys
-
-from pip._vendor import pytoml, six
-
-from pip._internal.exceptions import InstallationError
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import Any, Tuple, Optional, List
-
-
-def _is_list_of_str(obj):
-    # type: (Any) -> bool
-    return (
-        isinstance(obj, list) and
-        all(isinstance(item, six.string_types) for item in obj)
-    )
-
-
-def make_pyproject_path(unpacked_source_directory):
-    # type: (str) -> str
-    path = os.path.join(unpacked_source_directory, 'pyproject.toml')
-
-    # Python2 __file__ should not be unicode
-    if six.PY2 and isinstance(path, six.text_type):
-        path = path.encode(sys.getfilesystemencoding())
-
-    return path
-
-
-def load_pyproject_toml(
-    use_pep517,  # type: Optional[bool]
-    pyproject_toml,  # type: str
-    setup_py,  # type: str
-    req_name  # type: str
-):
-    # type: (...) -> Optional[Tuple[List[str], str, List[str]]]
-    """Load the pyproject.toml file.
-
-    Parameters:
-        use_pep517 - Has the user requested PEP 517 processing? None
-                     means the user hasn't explicitly specified.
-        pyproject_toml - Location of the project's pyproject.toml file
-        setup_py - Location of the project's setup.py file
-        req_name - The name of the requirement we're processing (for
-                   error reporting)
-
-    Returns:
-        None if we should use the legacy code path, otherwise a tuple
-        (
-            requirements from pyproject.toml,
-            name of PEP 517 backend,
-            requirements we should check are installed after setting
-                up the build environment
-        )
-    """
-    has_pyproject = os.path.isfile(pyproject_toml)
-    has_setup = os.path.isfile(setup_py)
-
-    if has_pyproject:
-        with io.open(pyproject_toml, encoding="utf-8") as f:
-            pp_toml = pytoml.load(f)
-        build_system = pp_toml.get("build-system")
-    else:
-        build_system = None
-
-    # The following cases must use PEP 517
-    # We check for use_pep517 being non-None and falsey because that means
-    # the user explicitly requested --no-use-pep517.  The value 0 as
-    # opposed to False can occur when the value is provided via an
-    # environment variable or config file option (due to the quirk of
-    # strtobool() returning an integer in pip's configuration code).
-    if has_pyproject and not has_setup:
-        if use_pep517 is not None and not use_pep517:
-            raise InstallationError(
-                "Disabling PEP 517 processing is invalid: "
-                "project does not have a setup.py"
-            )
-        use_pep517 = True
-    elif build_system and "build-backend" in build_system:
-        if use_pep517 is not None and not use_pep517:
-            raise InstallationError(
-                "Disabling PEP 517 processing is invalid: "
-                "project specifies a build backend of {} "
-                "in pyproject.toml".format(
-                    build_system["build-backend"]
-                )
-            )
-        use_pep517 = True
-
-    # If we haven't worked out whether to use PEP 517 yet,
-    # and the user hasn't explicitly stated a preference,
-    # we do so if the project has a pyproject.toml file.
-    elif use_pep517 is None:
-        use_pep517 = has_pyproject
-
-    # At this point, we know whether we're going to use PEP 517.
-    assert use_pep517 is not None
-
-    # If we're using the legacy code path, there is nothing further
-    # for us to do here.
-    if not use_pep517:
-        return None
-
-    if build_system is None:
-        # Either the user has a pyproject.toml with no build-system
-        # section, or the user has no pyproject.toml, but has opted in
-        # explicitly via --use-pep517.
-        # In the absence of any explicit backend specification, we
-        # assume the setuptools backend that most closely emulates the
-        # traditional direct setup.py execution, and require wheel and
-        # a version of setuptools that supports that backend.
-
-        build_system = {
-            "requires": ["setuptools>=40.8.0", "wheel"],
-            "build-backend": "setuptools.build_meta:__legacy__",
-        }
-
-    # If we're using PEP 517, we have build system information (either
-    # from pyproject.toml, or defaulted by the code above).
-    # Note that at this point, we do not know if the user has actually
-    # specified a backend, though.
-    assert build_system is not None
-
-    # Ensure that the build-system section in pyproject.toml conforms
-    # to PEP 518.
-    error_template = (
-        "{package} has a pyproject.toml file that does not comply "
-        "with PEP 518: {reason}"
-    )
-
-    # Specifying the build-system table but not the requires key is invalid
-    if "requires" not in build_system:
-        raise InstallationError(
-            error_template.format(package=req_name, reason=(
-                "it has a 'build-system' table but not "
-                "'build-system.requires' which is mandatory in the table"
-            ))
-        )
-
-    # Error out if requires is not a list of strings
-    requires = build_system["requires"]
-    if not _is_list_of_str(requires):
-        raise InstallationError(error_template.format(
-            package=req_name,
-            reason="'build-system.requires' is not a list of strings.",
-        ))
-
-    backend = build_system.get("build-backend")
-    check = []  # type: List[str]
-    if backend is None:
-        # If the user didn't specify a backend, we assume they want to use
-        # the setuptools backend. But we can't be sure they have included
-        # a version of setuptools which supplies the backend, or wheel
-        # (which is needed by the backend) in their requirements. So we
-        # make a note to check that those requirements are present once
-        # we have set up the environment.
-        # This is quite a lot of work to check for a very specific case. But
-        # the problem is, that case is potentially quite common - projects that
-        # adopted PEP 518 early for the ability to specify requirements to
-        # execute setup.py, but never considered needing to mention the build
-        # tools themselves. The original PEP 518 code had a similar check (but
-        # implemented in a different way).
-        backend = "setuptools.build_meta:__legacy__"
-        check = ["setuptools>=40.8.0", "wheel"]
-
-    return (requires, backend, check)
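load_pyproject_toml above decides whether to use PEP 517 from the user's explicit flag, the presence of pyproject.toml and setup.py, and whether a build backend is declared. A minimal sketch of just that decision table (ValueError stands in for pip's InstallationError; validation of build-system.requires is omitted):

    # Standalone sketch of the use_pep517 decision implemented above.
    def decide_use_pep517(use_pep517, has_pyproject, has_setup,
                          has_build_backend):
        if has_pyproject and not has_setup:
            if use_pep517 is not None and not use_pep517:
                raise ValueError("cannot disable PEP 517: no setup.py")
            return True
        if has_build_backend:
            if use_pep517 is not None and not use_pep517:
                raise ValueError("cannot disable PEP 517: backend specified")
            return True
        if use_pep517 is None:
            # No explicit preference: follow the presence of pyproject.toml.
            return has_pyproject
        return bool(use_pep517)

    assert decide_use_pep517(None, has_pyproject=True, has_setup=True,
                             has_build_backend=False) is True
    assert decide_use_pep517(None, has_pyproject=False, has_setup=True,
                             has_build_backend=False) is False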
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/__init__.py
deleted file mode 100644
index 993f23a..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/__init__.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-
-from __future__ import absolute_import
-
-import logging
-
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-from .req_file import parse_requirements
-from .req_install import InstallRequirement
-from .req_set import RequirementSet
-
-if MYPY_CHECK_RUNNING:
-    from typing import Any, List, Sequence
-
-__all__ = [
-    "RequirementSet", "InstallRequirement",
-    "parse_requirements", "install_given_reqs",
-]
-
-logger = logging.getLogger(__name__)
-
-
-def install_given_reqs(
-    to_install,  # type: List[InstallRequirement]
-    install_options,  # type: List[str]
-    global_options=(),  # type: Sequence[str]
-    *args,  # type: Any
-    **kwargs  # type: Any
-):
-    # type: (...) -> List[InstallRequirement]
-    """
-    Install everything in the given list.
-
-    (to be called after having downloaded and unpacked the packages)
-    """
-
-    if to_install:
-        logger.info(
-            'Installing collected packages: %s',
-            ', '.join([req.name for req in to_install]),
-        )
-
-    with indent_log():
-        for requirement in to_install:
-            if requirement.conflicts_with:
-                logger.info(
-                    'Found existing installation: %s',
-                    requirement.conflicts_with,
-                )
-                with indent_log():
-                    uninstalled_pathset = requirement.uninstall(
-                        auto_confirm=True
-                    )
-            try:
-                requirement.install(
-                    install_options,
-                    global_options,
-                    *args,
-                    **kwargs
-                )
-            except Exception:
-                should_rollback = (
-                    requirement.conflicts_with and
-                    not requirement.install_succeeded
-                )
-                # if install did not succeed, rollback previous uninstall
-                if should_rollback:
-                    uninstalled_pathset.rollback()
-                raise
-            else:
-                should_commit = (
-                    requirement.conflicts_with and
-                    requirement.install_succeeded
-                )
-                if should_commit:
-                    uninstalled_pathset.commit()
-            requirement.remove_temporary_source()
-
-    return to_install
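install_given_reqs above wraps each install in a commit/rollback pattern: when a conflicting installation was uninstalled first, the uninstall is rolled back if the new install fails and committed if it succeeds. A generic sketch of that control flow (the classes are hypothetical stand-ins, not pip's uninstall machinery):

    # Standalone sketch of the rollback/commit pattern used above.
    class UninstalledPathSet(object):
        def rollback(self):
            print('restoring the previously installed files')

        def commit(self):
            print('permanently removing the previously installed files')

    def install_with_rollback(do_install, uninstalled):
        try:
            do_install()
        except Exception:
            # The new install failed: put the old installation back.
            if uninstalled is not None:
                uninstalled.rollback()
            raise
        else:
            # The new install succeeded: drop the old files for good.
            if uninstalled is not None:
                uninstalled.commit()

    install_with_rollback(lambda: None, UninstalledPathSet())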
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/constructors.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/constructors.py
deleted file mode 100644
index 03b5148..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/constructors.py
+++ /dev/null
@@ -1,436 +0,0 @@
-"""Backing implementation for InstallRequirement's various constructors
-
-The idea here is that these formed a major chunk of InstallRequirement's size,
-so moving them and the support code dedicated to them outside of that class
-helps make the rest of the code easier to understand.
-
-These are meant to be used elsewhere within pip to create instances of
-InstallRequirement.
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
-
-import logging
-import os
-import re
-
-from pip._vendor.packaging.markers import Marker
-from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
-from pip._vendor.packaging.specifiers import Specifier
-from pip._vendor.pkg_resources import RequirementParseError, parse_requirements
-
-from pip._internal.exceptions import InstallationError
-from pip._internal.models.index import PyPI, TestPyPI
-from pip._internal.models.link import Link
-from pip._internal.pyproject import make_pyproject_path
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.utils.filetypes import ARCHIVE_EXTENSIONS
-from pip._internal.utils.misc import is_installable_dir, splitext
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.urls import path_to_url
-from pip._internal.vcs import is_url, vcs
-from pip._internal.wheel import Wheel
-
-if MYPY_CHECK_RUNNING:
-    from typing import (
-        Any, Dict, Optional, Set, Tuple, Union,
-    )
-    from pip._internal.cache import WheelCache
-
-
-__all__ = [
-    "install_req_from_editable", "install_req_from_line",
-    "parse_editable"
-]
-
-logger = logging.getLogger(__name__)
-operators = Specifier._operators.keys()
-
-
-def is_archive_file(name):
-    # type: (str) -> bool
-    """Return True if `name` is a considered as an archive file."""
-    ext = splitext(name)[1].lower()
-    if ext in ARCHIVE_EXTENSIONS:
-        return True
-    return False
-
-
-def _strip_extras(path):
-    # type: (str) -> Tuple[str, Optional[str]]
-    m = re.match(r'^(.+)(\[[^\]]+\])$', path)
-    extras = None
-    if m:
-        path_no_extras = m.group(1)
-        extras = m.group(2)
-    else:
-        path_no_extras = path
-
-    return path_no_extras, extras
-
-
-def convert_extras(extras):
-    # type: (Optional[str]) -> Set[str]
-    if not extras:
-        return set()
-    return Requirement("placeholder" + extras.lower()).extras
-
-
-def parse_editable(editable_req):
-    # type: (str) -> Tuple[Optional[str], str, Optional[Set[str]]]
-    """Parses an editable requirement into:
-        - a requirement name
-        - a URL
-        - extras
-    Accepted requirements:
-        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
-        .[some_extra]
-    """
-
-    url = editable_req
-
-    # If a file path is specified with extras, strip off the extras.
-    url_no_extras, extras = _strip_extras(url)
-
-    if os.path.isdir(url_no_extras):
-        if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
-            msg = (
-                'File "setup.py" not found. Directory cannot be installed '
-                'in editable mode: {}'.format(os.path.abspath(url_no_extras))
-            )
-            pyproject_path = make_pyproject_path(url_no_extras)
-            if os.path.isfile(pyproject_path):
-                msg += (
-                    '\n(A "pyproject.toml" file was found, but editable '
-                    'mode currently requires a setup.py based build.)'
-                )
-            raise InstallationError(msg)
-
-        # Treating it as code that has already been checked out
-        url_no_extras = path_to_url(url_no_extras)
-
-    if url_no_extras.lower().startswith('file:'):
-        package_name = Link(url_no_extras).egg_fragment
-        if extras:
-            return (
-                package_name,
-                url_no_extras,
-                Requirement("placeholder" + extras.lower()).extras,
-            )
-        else:
-            return package_name, url_no_extras, None
-
-    for version_control in vcs:
-        if url.lower().startswith('%s:' % version_control):
-            url = '%s+%s' % (version_control, url)
-            break
-
-    if '+' not in url:
-        raise InstallationError(
-            '{} is not a valid editable requirement. '
-            'It should either be a path to a local project or a VCS URL '
-            '(beginning with svn+, git+, hg+, or bzr+).'.format(editable_req)
-        )
-
-    vc_type = url.split('+', 1)[0].lower()
-
-    if not vcs.get_backend(vc_type):
-        error_message = 'For --editable=%s only ' % editable_req + \
-            ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
-            ' is currently supported'
-        raise InstallationError(error_message)
-
-    package_name = Link(url).egg_fragment
-    if not package_name:
-        raise InstallationError(
-            "Could not detect requirement name for '%s', please specify one "
-            "with #egg=your_package_name" % editable_req
-        )
-    return package_name, url, None
-
-
-def deduce_helpful_msg(req):
-    # type: (str) -> str
-    """Returns helpful msg in case requirements file does not exist,
-    or cannot be parsed.
-
-    :params req: Requirements file path
-    """
-    msg = ""
-    if os.path.exists(req):
-        msg = " It does exist."
-        # Try to parse and check if it is a requirements file.
-        try:
-            with open(req, 'r') as fp:
-                # parse first line only
-                next(parse_requirements(fp.read()))
-                msg += " The argument you provided " + \
-                    "(%s) appears to be a" % (req) + \
-                    " requirements file. If that is the" + \
-                    " case, use the '-r' flag to install" + \
-                    " the packages specified within it."
-        except RequirementParseError:
-            logger.debug("Cannot parse '%s' as requirements \
-            file" % (req), exc_info=True)
-    else:
-        msg += " File '%s' does not exist." % (req)
-    return msg
-
-
-class RequirementParts(object):
-    def __init__(
-            self,
-            requirement,  # type: Optional[Requirement]
-            link,         # type: Optional[Link]
-            markers,      # type: Optional[Marker]
-            extras,       # type: Set[str]
-    ):
-        self.requirement = requirement
-        self.link = link
-        self.markers = markers
-        self.extras = extras
-
-
-def parse_req_from_editable(editable_req):
-    # type: (str) -> RequirementParts
-    name, url, extras_override = parse_editable(editable_req)
-
-    if name is not None:
-        try:
-            req = Requirement(name)
-        except InvalidRequirement:
-            raise InstallationError("Invalid requirement: '%s'" % name)
-    else:
-        req = None
-
-    link = Link(url)
-
-    return RequirementParts(req, link, None, extras_override)
-
-
-# ---- The actual constructors follow ----
-
-
-def install_req_from_editable(
-    editable_req,  # type: str
-    comes_from=None,  # type: Optional[str]
-    use_pep517=None,  # type: Optional[bool]
-    isolated=False,  # type: bool
-    options=None,  # type: Optional[Dict[str, Any]]
-    wheel_cache=None,  # type: Optional[WheelCache]
-    constraint=False  # type: bool
-):
-    # type: (...) -> InstallRequirement
-
-    parts = parse_req_from_editable(editable_req)
-
-    source_dir = parts.link.file_path if parts.link.scheme == 'file' else None
-
-    return InstallRequirement(
-        parts.requirement, comes_from, source_dir=source_dir,
-        editable=True,
-        link=parts.link,
-        constraint=constraint,
-        use_pep517=use_pep517,
-        isolated=isolated,
-        options=options if options else {},
-        wheel_cache=wheel_cache,
-        extras=parts.extras,
-    )
-
-
-def _looks_like_path(name):
-    # type: (str) -> bool
-    """Checks whether the string "looks like" a path on the filesystem.
-
-    This does not check whether the target actually exists; it only judges by
-    appearance.
-
-    Returns true if any of the following conditions is true:
-    * a path separator is found (either os.path.sep or os.path.altsep);
-    * a dot is found (which represents the current directory).
-    """
-    if os.path.sep in name:
-        return True
-    if os.path.altsep is not None and os.path.altsep in name:
-        return True
-    if name.startswith("."):
-        return True
-    return False
-
-
-def _get_url_from_path(path, name):
-    # type: (str, str) -> Optional[str]
-    """
-    Return a URL for the given path, or None if the name should instead be
-    treated as a plain requirement specifier.
-
-    If the name looks like a path and is an installable directory (i.e. it
-    contains a setup.py or pyproject.toml), return its file URL. Otherwise,
-    if the path is not an archive file (such as a .whl), return None. If it
-    is an existing archive file, return its file URL. If the name contains
-    an '@' and the part before it does not look like a path, return None so
-    the name can be treated as a PEP 440 URL requirement instead.
-    """
-    if _looks_like_path(name) and os.path.isdir(path):
-        if is_installable_dir(path):
-            return path_to_url(path)
-        raise InstallationError(
-            "Directory %r is not installable. Neither 'setup.py' "
-            "nor 'pyproject.toml' found." % name
-        )
-    if not is_archive_file(path):
-        return None
-    if os.path.isfile(path):
-        return path_to_url(path)
-    urlreq_parts = name.split('@', 1)
-    if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]):
-        # If the path contains '@' and the part before it does not look
-        # like a path, try to treat it as a PEP 440 URL req instead.
-        return None
-    logger.warning(
-        'Requirement %r looks like a filename, but the '
-        'file does not exist',
-        name
-    )
-    return path_to_url(path)
-
-
-def parse_req_from_line(name, line_source):
-    # type: (str, Optional[str]) -> RequirementParts
-    if is_url(name):
-        marker_sep = '; '
-    else:
-        marker_sep = ';'
-    if marker_sep in name:
-        name, markers_as_string = name.split(marker_sep, 1)
-        markers_as_string = markers_as_string.strip()
-        if not markers_as_string:
-            markers = None
-        else:
-            markers = Marker(markers_as_string)
-    else:
-        markers = None
-    name = name.strip()
-    req_as_string = None
-    path = os.path.normpath(os.path.abspath(name))
-    link = None
-    extras_as_string = None
-
-    if is_url(name):
-        link = Link(name)
-    else:
-        p, extras_as_string = _strip_extras(path)
-        url = _get_url_from_path(p, name)
-        if url is not None:
-            link = Link(url)
-
-    # it's a local file, dir, or url
-    if link:
-        # Handle relative file URLs
-        if link.scheme == 'file' and re.search(r'\.\./', link.url):
-            link = Link(
-                path_to_url(os.path.normpath(os.path.abspath(link.path))))
-        # wheel file
-        if link.is_wheel:
-            wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
-            req_as_string = "%s==%s" % (wheel.name, wheel.version)
-        else:
-            # set the req to the egg fragment.  when it's not there, this
-            # will become an 'unnamed' requirement
-            req_as_string = link.egg_fragment
-
-    # a requirement specifier
-    else:
-        req_as_string = name
-
-    extras = convert_extras(extras_as_string)
-
-    def with_source(text):
-        if not line_source:
-            return text
-        return '{} (from {})'.format(text, line_source)
-
-    if req_as_string is not None:
-        try:
-            req = Requirement(req_as_string)
-        except InvalidRequirement:
-            if os.path.sep in req_as_string:
-                add_msg = "It looks like a path."
-                add_msg += deduce_helpful_msg(req_as_string)
-            elif ('=' in req_as_string and
-                  not any(op in req_as_string for op in operators)):
-                add_msg = "= is not a valid operator. Did you mean == ?"
-            else:
-                add_msg = ''
-            msg = with_source(
-                'Invalid requirement: {!r}'.format(req_as_string)
-            )
-            if add_msg:
-                msg += '\nHint: {}'.format(add_msg)
-            raise InstallationError(msg)
-    else:
-        req = None
-
-    return RequirementParts(req, link, markers, extras)
-
-
-def install_req_from_line(
-    name,  # type: str
-    comes_from=None,  # type: Optional[Union[str, InstallRequirement]]
-    use_pep517=None,  # type: Optional[bool]
-    isolated=False,  # type: bool
-    options=None,  # type: Optional[Dict[str, Any]]
-    wheel_cache=None,  # type: Optional[WheelCache]
-    constraint=False,  # type: bool
-    line_source=None,  # type: Optional[str]
-):
-    # type: (...) -> InstallRequirement
-    """Creates an InstallRequirement from a name, which might be a
-    requirement, directory containing 'setup.py', filename, or URL.
-
-    :param line_source: An optional string describing where the line is from,
-        for logging purposes in case of an error.
-    """
-    parts = parse_req_from_line(name, line_source)
-
-    return InstallRequirement(
-        parts.requirement, comes_from, link=parts.link, markers=parts.markers,
-        use_pep517=use_pep517, isolated=isolated,
-        options=options if options else {},
-        wheel_cache=wheel_cache,
-        constraint=constraint,
-        extras=parts.extras,
-    )
-
-
-def install_req_from_req_string(
-    req_string,  # type: str
-    comes_from=None,  # type: Optional[InstallRequirement]
-    isolated=False,  # type: bool
-    wheel_cache=None,  # type: Optional[WheelCache]
-    use_pep517=None  # type: Optional[bool]
-):
-    # type: (...) -> InstallRequirement
-    try:
-        req = Requirement(req_string)
-    except InvalidRequirement:
-        raise InstallationError("Invalid requirement: '%s'" % req_string)
-
-    domains_not_allowed = [
-        PyPI.file_storage_domain,
-        TestPyPI.file_storage_domain,
-    ]
-    if (req.url and comes_from and comes_from.link and
-            comes_from.link.netloc in domains_not_allowed):
-        # Explicitly disallow pypi packages that depend on external urls
-        raise InstallationError(
-            "Packages installed from PyPI cannot depend on packages "
-            "which are not also hosted on PyPI.\n"
-            "%s depends on %s " % (comes_from.name, req)
-        )
-
-    return InstallRequirement(
-        req, comes_from, isolated=isolated, wheel_cache=wheel_cache,
-        use_pep517=use_pep517
-    )
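
The constructors module above splits a requirement line into marker, extras and specifier parts before building an InstallRequirement; it splits on ';' by hand because the line may be a bare path or URL rather than a valid PEP 508 string. As a rough standalone illustration of that split, here is a minimal sketch built only on the public packaging library; the helper name split_marker and the example requirement are illustrative, not pip's.

from packaging.markers import Marker
from packaging.requirements import InvalidRequirement, Requirement


def split_marker(line):
    # Split "name[extras]==1.0 ; marker" into (requirement text, Marker or None),
    # loosely mirroring the manual marker split in parse_req_from_line() above.
    if ';' in line:
        req_text, marker_text = line.split(';', 1)
        marker_text = marker_text.strip()
        marker = Marker(marker_text) if marker_text else None
    else:
        req_text, marker = line, None
    return req_text.strip(), marker


if __name__ == '__main__':
    text, marker = split_marker('requests[security]>=2.20 ; python_version >= "3.5"')
    try:
        req = Requirement(text)
    except InvalidRequirement as exc:
        raise SystemExit('Invalid requirement: {!r} ({})'.format(text, exc))
    print(req.name, sorted(req.extras), str(req.specifier))
    print(marker is None or marker.evaluate())
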
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/req_file.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/req_file.py
deleted file mode 100644
index 83b3d34..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/req_file.py
+++ /dev/null
@@ -1,403 +0,0 @@
-"""
-Requirements file parsing
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-
-from __future__ import absolute_import
-
-import optparse
-import os
-import re
-import shlex
-import sys
-
-from pip._vendor.six.moves import filterfalse
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-from pip._internal.cli import cmdoptions
-from pip._internal.download import get_file_content
-from pip._internal.exceptions import RequirementsFileParseError
-from pip._internal.models.search_scope import SearchScope
-from pip._internal.req.constructors import (
-    install_req_from_editable,
-    install_req_from_line,
-)
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import (
-        Any, Callable, Iterator, List, NoReturn, Optional, Text, Tuple,
-    )
-    from pip._internal.req import InstallRequirement
-    from pip._internal.cache import WheelCache
-    from pip._internal.index import PackageFinder
-    from pip._internal.network.session import PipSession
-
-    ReqFileLines = Iterator[Tuple[int, Text]]
-
-__all__ = ['parse_requirements']
-
-SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
-COMMENT_RE = re.compile(r'(^|\s+)#.*$')
-
-# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
-# variable name consisting of only uppercase letters, digits or the '_'
-# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
-# 2013 Edition.
-ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')
-
-SUPPORTED_OPTIONS = [
-    cmdoptions.constraints,
-    cmdoptions.editable,
-    cmdoptions.requirements,
-    cmdoptions.no_index,
-    cmdoptions.index_url,
-    cmdoptions.find_links,
-    cmdoptions.extra_index_url,
-    cmdoptions.always_unzip,
-    cmdoptions.no_binary,
-    cmdoptions.only_binary,
-    cmdoptions.pre,
-    cmdoptions.trusted_host,
-    cmdoptions.require_hashes,
-]  # type: List[Callable[..., optparse.Option]]
-
-# options to be passed to requirements
-SUPPORTED_OPTIONS_REQ = [
-    cmdoptions.install_options,
-    cmdoptions.global_options,
-    cmdoptions.hash,
-]  # type: List[Callable[..., optparse.Option]]
-
-# the 'dest' string values
-SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
-
-
-def parse_requirements(
-    filename,  # type: str
-    finder=None,  # type: Optional[PackageFinder]
-    comes_from=None,  # type: Optional[str]
-    options=None,  # type: Optional[optparse.Values]
-    session=None,  # type: Optional[PipSession]
-    constraint=False,  # type: bool
-    wheel_cache=None,  # type: Optional[WheelCache]
-    use_pep517=None  # type: Optional[bool]
-):
-    # type: (...) -> Iterator[InstallRequirement]
-    """Parse a requirements file and yield InstallRequirement instances.
-
-    :param filename:    Path or url of requirements file.
-    :param finder:      Instance of pip.index.PackageFinder.
-    :param comes_from:  Origin description of requirements.
-    :param options:     cli options.
-    :param session:     Instance of pip.download.PipSession.
-    :param constraint:  If true, parsing a constraint file rather than
-        requirements file.
-    :param wheel_cache: Instance of pip.wheel.WheelCache
-    :param use_pep517:  Value of the --use-pep517 option.
-    """
-    if session is None:
-        raise TypeError(
-            "parse_requirements() missing 1 required keyword argument: "
-            "'session'"
-        )
-
-    _, content = get_file_content(
-        filename, comes_from=comes_from, session=session
-    )
-
-    lines_enum = preprocess(content, options)
-
-    for line_number, line in lines_enum:
-        req_iter = process_line(line, filename, line_number, finder,
-                                comes_from, options, session, wheel_cache,
-                                use_pep517=use_pep517, constraint=constraint)
-        for req in req_iter:
-            yield req
-
-
-def preprocess(content, options):
-    # type: (Text, Optional[optparse.Values]) -> ReqFileLines
-    """Split, filter, and join lines, and return a line iterator
-
-    :param content: the content of the requirements file
-    :param options: cli options
-    """
-    lines_enum = enumerate(content.splitlines(), start=1)  # type: ReqFileLines
-    lines_enum = join_lines(lines_enum)
-    lines_enum = ignore_comments(lines_enum)
-    lines_enum = skip_regex(lines_enum, options)
-    lines_enum = expand_env_variables(lines_enum)
-    return lines_enum
-
-
-def process_line(
-    line,  # type: Text
-    filename,  # type: str
-    line_number,  # type: int
-    finder=None,  # type: Optional[PackageFinder]
-    comes_from=None,  # type: Optional[str]
-    options=None,  # type: Optional[optparse.Values]
-    session=None,  # type: Optional[PipSession]
-    wheel_cache=None,  # type: Optional[WheelCache]
-    use_pep517=None,  # type: Optional[bool]
-    constraint=False,  # type: bool
-):
-    # type: (...) -> Iterator[InstallRequirement]
-    """Process a single requirements line; This can result in creating/yielding
-    requirements, or updating the finder.
-
-    For lines that contain requirements, the only options that have an effect
-    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
-    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
-    ignored.
-
-    For lines that do not contain requirements, the only options that have an
-    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
-    be present, but are ignored. These lines may contain multiple options
-    (although our docs imply only one is supported), and all are parsed and
-    affect the finder.
-
-    :param constraint: If True, parsing a constraints file.
-    :param options: OptionParser options that we may update
-    """
-    parser = build_parser(line)
-    defaults = parser.get_default_values()
-    defaults.index_url = None
-    if finder:
-        defaults.format_control = finder.format_control
-    args_str, options_str = break_args_options(line)
-    # Prior to 2.7.3, shlex cannot deal with unicode entries
-    if sys.version_info < (2, 7, 3):
-        # https://github.com/python/mypy/issues/1174
-        options_str = options_str.encode('utf8')  # type: ignore
-    # https://github.com/python/mypy/issues/1174
-    opts, _ = parser.parse_args(
-        shlex.split(options_str), defaults)  # type: ignore
-
-    # preserve for the nested code path
-    line_comes_from = '%s %s (line %s)' % (
-        '-c' if constraint else '-r', filename, line_number,
-    )
-
-    # yield a line requirement
-    if args_str:
-        isolated = options.isolated_mode if options else False
-        if options:
-            cmdoptions.check_install_build_global(options, opts)
-        # get the options that apply to requirements
-        req_options = {}
-        for dest in SUPPORTED_OPTIONS_REQ_DEST:
-            if dest in opts.__dict__ and opts.__dict__[dest]:
-                req_options[dest] = opts.__dict__[dest]
-        line_source = 'line {} of {}'.format(line_number, filename)
-        yield install_req_from_line(
-            args_str,
-            comes_from=line_comes_from,
-            use_pep517=use_pep517,
-            isolated=isolated,
-            options=req_options,
-            wheel_cache=wheel_cache,
-            constraint=constraint,
-            line_source=line_source,
-        )
-
-    # yield an editable requirement
-    elif opts.editables:
-        isolated = options.isolated_mode if options else False
-        yield install_req_from_editable(
-            opts.editables[0], comes_from=line_comes_from,
-            use_pep517=use_pep517,
-            constraint=constraint, isolated=isolated, wheel_cache=wheel_cache
-        )
-
-    # parse a nested requirements file
-    elif opts.requirements or opts.constraints:
-        if opts.requirements:
-            req_path = opts.requirements[0]
-            nested_constraint = False
-        else:
-            req_path = opts.constraints[0]
-            nested_constraint = True
-        # original file is over http
-        if SCHEME_RE.search(filename):
-            # do a url join so relative paths work
-            req_path = urllib_parse.urljoin(filename, req_path)
-        # original file and nested file are paths
-        elif not SCHEME_RE.search(req_path):
-            # do a join so relative paths work
-            req_path = os.path.join(os.path.dirname(filename), req_path)
-        # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
-        parsed_reqs = parse_requirements(
-            req_path, finder, comes_from, options, session,
-            constraint=nested_constraint, wheel_cache=wheel_cache
-        )
-        for req in parsed_reqs:
-            yield req
-
-    # percolate hash-checking option upward
-    elif opts.require_hashes:
-        options.require_hashes = opts.require_hashes
-
-    # set finder options
-    elif finder:
-        find_links = finder.find_links
-        index_urls = finder.index_urls
-        if opts.index_url:
-            index_urls = [opts.index_url]
-        if opts.no_index is True:
-            index_urls = []
-        if opts.extra_index_urls:
-            index_urls.extend(opts.extra_index_urls)
-        if opts.find_links:
-            # FIXME: it would be nice to keep track of the source
-            # of the find_links: support a find-links local path
-            # relative to a requirements file.
-            value = opts.find_links[0]
-            req_dir = os.path.dirname(os.path.abspath(filename))
-            relative_to_reqs_file = os.path.join(req_dir, value)
-            if os.path.exists(relative_to_reqs_file):
-                value = relative_to_reqs_file
-            find_links.append(value)
-
-        search_scope = SearchScope(
-            find_links=find_links,
-            index_urls=index_urls,
-        )
-        finder.search_scope = search_scope
-
-        if opts.pre:
-            finder.set_allow_all_prereleases()
-        for host in opts.trusted_hosts or []:
-            source = 'line {} of {}'.format(line_number, filename)
-            session.add_trusted_host(host, source=source)
-
-
-def break_args_options(line):
-    # type: (Text) -> Tuple[str, Text]
-    """Break up the line into an args and options string.  We only want to shlex
-    (and then optparse) the options, not the args.  args can contain markers
-    which are corrupted by shlex.
-    """
-    tokens = line.split(' ')
-    args = []
-    options = tokens[:]
-    for token in tokens:
-        if token.startswith('-') or token.startswith('--'):
-            break
-        else:
-            args.append(token)
-            options.pop(0)
-    return ' '.join(args), ' '.join(options)  # type: ignore
-
-
-def build_parser(line):
-    # type: (Text) -> optparse.OptionParser
-    """
-    Return a parser for parsing requirement lines
-    """
-    parser = optparse.OptionParser(add_help_option=False)
-
-    option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
-    for option_factory in option_factories:
-        option = option_factory()
-        parser.add_option(option)
-
-    # By default optparse sys.exits on parsing errors. We want to wrap
-    # that in our own exception.
-    def parser_exit(self, msg):
-        # type: (Any, str) -> NoReturn
-        # add offending line
-        msg = 'Invalid requirement: %s\n%s' % (line, msg)
-        raise RequirementsFileParseError(msg)
-    # NOTE: mypy disallows assigning to a method
-    #       https://github.com/python/mypy/issues/2427
-    parser.exit = parser_exit  # type: ignore
-
-    return parser
-
-
-def join_lines(lines_enum):
-    # type: (ReqFileLines) -> ReqFileLines
-    """Joins a line ending in '\' with the previous line (except when following
-    comments).  The joined line takes on the index of the first line.
-    """
-    primary_line_number = None
-    new_line = []  # type: List[Text]
-    for line_number, line in lines_enum:
-        if not line.endswith('\\') or COMMENT_RE.match(line):
-            if COMMENT_RE.match(line):
-                # this ensures comments are always matched later
-                line = ' ' + line
-            if new_line:
-                new_line.append(line)
-                yield primary_line_number, ''.join(new_line)
-                new_line = []
-            else:
-                yield line_number, line
-        else:
-            if not new_line:
-                primary_line_number = line_number
-            new_line.append(line.strip('\\'))
-
-    # last line contains \
-    if new_line:
-        yield primary_line_number, ''.join(new_line)
-
-    # TODO: handle space after '\'.
-
-
-def ignore_comments(lines_enum):
-    # type: (ReqFileLines) -> ReqFileLines
-    """
-    Strips comments and filter empty lines.
-    """
-    for line_number, line in lines_enum:
-        line = COMMENT_RE.sub('', line)
-        line = line.strip()
-        if line:
-            yield line_number, line
-
-
-def skip_regex(lines_enum, options):
-    # type: (ReqFileLines, Optional[optparse.Values]) -> ReqFileLines
-    """
-    Skip lines that match '--skip-requirements-regex' pattern
-
-    Note: the regex pattern is only built once
-    """
-    skip_regex = options.skip_requirements_regex if options else None
-    if skip_regex:
-        pattern = re.compile(skip_regex)
-        lines_enum = filterfalse(lambda e: pattern.search(e[1]), lines_enum)
-    return lines_enum
-
-
-def expand_env_variables(lines_enum):
-    # type: (ReqFileLines) -> ReqFileLines
-    """Replace all environment variables that can be retrieved via `os.getenv`.
-
-    The only allowed format for environment variables defined in the
-    requirement file is `${MY_VARIABLE_1}` to ensure two things:
-
-    1. Strings that contain a `$` aren't accidentally (partially) expanded.
-    2. Ensure consistency across platforms for requirement files.
-
-    These points are the result of a discussion on the `github pull
-    request #3514 <https://github.com/pypa/pip/pull/3514>`_.
-
-    Valid characters in variable names follow the `POSIX standard
-    <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
-    to uppercase letters, digits and the `_` (underscore).
-    """
-    for line_number, line in lines_enum:
-        for env_var, var_name in ENV_VAR_RE.findall(line):
-            value = os.getenv(var_name)
-            if not value:
-                continue
-
-            line = line.replace(env_var, value)
-
-        yield line_number, line
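
req_file.py above runs every requirements file through a small preprocessing pipeline (join_lines, ignore_comments, skip_regex, expand_env_variables) before any requirement is constructed. The sketch below imitates just the comment-stripping and ${VAR} expansion steps on an in-memory sample, reusing the same regular expressions; the function name preprocess_lines and the example index URL are illustrative and not pip code.

import os
import re

COMMENT_RE = re.compile(r'(^|\s+)#.*$')
ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')


def preprocess_lines(content):
    # Yield (line_number, line) pairs with comments stripped, blank lines
    # dropped, and ${VAR} references expanded from the environment.
    for line_number, line in enumerate(content.splitlines(), start=1):
        line = COMMENT_RE.sub('', line).strip()
        if not line:
            continue
        for env_var, var_name in ENV_VAR_RE.findall(line):
            value = os.getenv(var_name)
            if value:
                line = line.replace(env_var, value)
        yield line_number, line


if __name__ == '__main__':
    os.environ['INDEX_TOKEN'] = 'secret'
    sample = (
        '# base requirements\n'
        '--extra-index-url https://user:${INDEX_TOKEN}@example.invalid/simple\n'
        'requests>=2.20  # pinned loosely\n'
    )
    for number, line in preprocess_lines(sample):
        print(number, line)
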
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/req_install.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/req_install.py
deleted file mode 100644
index 5a8c0dc..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/req_install.py
+++ /dev/null
@@ -1,966 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import atexit
-import logging
-import os
-import shutil
-import sys
-import sysconfig
-import zipfile
-from distutils.util import change_root
-
-from pip._vendor import pkg_resources, six
-from pip._vendor.packaging.requirements import Requirement
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.packaging.version import Version
-from pip._vendor.packaging.version import parse as parse_version
-from pip._vendor.pep517.wrappers import Pep517HookCaller
-
-from pip._internal import pep425tags, wheel
-from pip._internal.build_env import NoOpBuildEnvironment
-from pip._internal.exceptions import InstallationError
-from pip._internal.models.link import Link
-from pip._internal.operations.generate_metadata import get_metadata_generator
-from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
-from pip._internal.req.req_uninstall import UninstallPathSet
-from pip._internal.utils.compat import native_str
-from pip._internal.utils.hashes import Hashes
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.marker_files import (
-    PIP_DELETE_MARKER_FILENAME,
-    has_delete_marker_file,
-)
-from pip._internal.utils.misc import (
-    _make_build_dir,
-    ask_path_exists,
-    backup_dir,
-    display_path,
-    dist_in_site_packages,
-    dist_in_usersite,
-    ensure_dir,
-    get_installed_version,
-    hide_url,
-    redact_auth_from_url,
-    rmtree,
-)
-from pip._internal.utils.packaging import get_metadata
-from pip._internal.utils.setuptools_build import make_setuptools_shim_args
-from pip._internal.utils.subprocess import (
-    call_subprocess,
-    runner_with_spinner_message,
-)
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.virtualenv import running_under_virtualenv
-from pip._internal.vcs import vcs
-
-if MYPY_CHECK_RUNNING:
-    from typing import (
-        Any, Dict, Iterable, List, Optional, Sequence, Union,
-    )
-    from pip._internal.build_env import BuildEnvironment
-    from pip._internal.cache import WheelCache
-    from pip._internal.index import PackageFinder
-    from pip._vendor.pkg_resources import Distribution
-    from pip._vendor.packaging.specifiers import SpecifierSet
-    from pip._vendor.packaging.markers import Marker
-
-
-logger = logging.getLogger(__name__)
-
-
-class InstallRequirement(object):
-    """
-    Represents something that may be installed later on, may have information
-    about where to fetch the relevant requirement and also contains logic for
-    installing the said requirement.
-    """
-
-    def __init__(
-        self,
-        req,  # type: Optional[Requirement]
-        comes_from,  # type: Optional[Union[str, InstallRequirement]]
-        source_dir=None,  # type: Optional[str]
-        editable=False,  # type: bool
-        link=None,  # type: Optional[Link]
-        markers=None,  # type: Optional[Marker]
-        use_pep517=None,  # type: Optional[bool]
-        isolated=False,  # type: bool
-        options=None,  # type: Optional[Dict[str, Any]]
-        wheel_cache=None,  # type: Optional[WheelCache]
-        constraint=False,  # type: bool
-        extras=()  # type: Iterable[str]
-    ):
-        # type: (...) -> None
-        assert req is None or isinstance(req, Requirement), req
-        self.req = req
-        self.comes_from = comes_from
-        self.constraint = constraint
-        if source_dir is None:
-            self.source_dir = None  # type: Optional[str]
-        else:
-            self.source_dir = os.path.normpath(os.path.abspath(source_dir))
-        self.editable = editable
-
-        self._wheel_cache = wheel_cache
-        if link is None and req and req.url:
-            # PEP 508 URL requirement
-            link = Link(req.url)
-        self.link = self.original_link = link
-
-        if extras:
-            self.extras = extras
-        elif req:
-            self.extras = {
-                pkg_resources.safe_extra(extra) for extra in req.extras
-            }
-        else:
-            self.extras = set()
-        if markers is None and req:
-            markers = req.marker
-        self.markers = markers
-
-        # This holds the pkg_resources.Distribution object if this requirement
-        # is already available:
-        self.satisfied_by = None
-        # This holds the pkg_resources.Distribution object if this requirement
-        # conflicts with another installed distribution:
-        self.conflicts_with = None
-        # Temporary build location
-        self._temp_build_dir = None  # type: Optional[TempDirectory]
-        # Used to store the global directory where the _temp_build_dir should
-        # have been created. Cf move_to_correct_build_directory method.
-        self._ideal_build_dir = None  # type: Optional[str]
-        # Set to True after successful installation
-        self.install_succeeded = None  # type: Optional[bool]
-        self.options = options if options else {}
-        # Set to True after successful preparation of this requirement
-        self.prepared = False
-        self.is_direct = False
-
-        self.isolated = isolated
-        self.build_env = NoOpBuildEnvironment()  # type: BuildEnvironment
-
-        # For PEP 517, the directory where we request the project metadata
-        # gets stored. We need this to pass to build_wheel, so the backend
-        # can ensure that the wheel matches the metadata (see the PEP for
-        # details).
-        self.metadata_directory = None  # type: Optional[str]
-
-        # The static build requirements (from pyproject.toml)
-        self.pyproject_requires = None  # type: Optional[List[str]]
-
-        # Build requirements that we will check are available
-        self.requirements_to_check = []  # type: List[str]
-
-        # The PEP 517 backend we should use to build the project
-        self.pep517_backend = None  # type: Optional[Pep517HookCaller]
-
-        # Are we using PEP 517 for this requirement?
-        # After pyproject.toml has been loaded, the only valid values are True
-        # and False. Before loading, None is valid (meaning "use the default").
-        # Setting an explicit value before loading pyproject.toml is supported,
-        # but after loading this flag should be treated as read only.
-        self.use_pep517 = use_pep517
-
-    def __str__(self):
-        # type: () -> str
-        if self.req:
-            s = str(self.req)
-            if self.link:
-                s += ' from %s' % redact_auth_from_url(self.link.url)
-        elif self.link:
-            s = redact_auth_from_url(self.link.url)
-        else:
-            s = ''
-        if self.satisfied_by is not None:
-            s += ' in %s' % display_path(self.satisfied_by.location)
-        if self.comes_from:
-            if isinstance(self.comes_from, six.string_types):
-                comes_from = self.comes_from  # type: Optional[str]
-            else:
-                comes_from = self.comes_from.from_path()
-            if comes_from:
-                s += ' (from %s)' % comes_from
-        return s
-
-    def __repr__(self):
-        # type: () -> str
-        return '<%s object: %s editable=%r>' % (
-            self.__class__.__name__, str(self), self.editable)
-
-    def format_debug(self):
-        # type: () -> str
-        """An un-tested helper for getting state, for debugging.
-        """
-        attributes = vars(self)
-        names = sorted(attributes)
-
-        state = (
-            "{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)
-        )
-        return '<{name} object: {{{state}}}>'.format(
-            name=self.__class__.__name__,
-            state=", ".join(state),
-        )
-
-    def populate_link(self, finder, upgrade, require_hashes):
-        # type: (PackageFinder, bool, bool) -> None
-        """Ensure that if a link can be found for this, that it is found.
-
-        Note that self.link may still be None - if upgrade is False and the
-        requirement is already installed.
-
-        If require_hashes is True, don't use the wheel cache, because cached
-        wheels, always built locally, have different hashes than the files
-        downloaded from the index server and thus throw false hash mismatches.
-        Furthermore, cached wheels at present have nondeterministic contents due
-        to file modification times.
-        """
-        if self.link is None:
-            self.link = finder.find_requirement(self, upgrade)
-        if self._wheel_cache is not None and not require_hashes:
-            old_link = self.link
-            supported_tags = pep425tags.get_supported()
-            self.link = self._wheel_cache.get(
-                link=self.link,
-                package_name=self.name,
-                supported_tags=supported_tags,
-            )
-            if old_link != self.link:
-                logger.debug('Using cached wheel link: %s', self.link)
-
-    # Things that are valid for all kinds of requirements?
-    @property
-    def name(self):
-        # type: () -> Optional[str]
-        if self.req is None:
-            return None
-        return native_str(pkg_resources.safe_name(self.req.name))
-
-    @property
-    def specifier(self):
-        # type: () -> SpecifierSet
-        return self.req.specifier
-
-    @property
-    def is_pinned(self):
-        # type: () -> bool
-        """Return whether I am pinned to an exact version.
-
-        For example, some-package==1.2 is pinned; some-package>1.2 is not.
-        """
-        specifiers = self.specifier
-        return (len(specifiers) == 1 and
-                next(iter(specifiers)).operator in {'==', '==='})
-
-    @property
-    def installed_version(self):
-        # type: () -> Optional[str]
-        return get_installed_version(self.name)
-
-    def match_markers(self, extras_requested=None):
-        # type: (Optional[Iterable[str]]) -> bool
-        if not extras_requested:
-            # Provide an extra to safely evaluate the markers
-            # without matching any extra
-            extras_requested = ('',)
-        if self.markers is not None:
-            return any(
-                self.markers.evaluate({'extra': extra})
-                for extra in extras_requested)
-        else:
-            return True
-
-    @property
-    def has_hash_options(self):
-        # type: () -> bool
-        """Return whether any known-good hashes are specified as options.
-
-        These activate --require-hashes mode; hashes specified as part of a
-        URL do not.
-
-        """
-        return bool(self.options.get('hashes', {}))
-
-    def hashes(self, trust_internet=True):
-        # type: (bool) -> Hashes
-        """Return a hash-comparer that considers my option- and URL-based
-        hashes to be known-good.
-
-        Hashes in URLs--ones embedded in the requirements file, not ones
-        downloaded from an index server--are almost peers with ones from
-        flags. They satisfy --require-hashes (whether it was implicitly or
-        explicitly activated) but do not activate it. md5 and sha224 are not
-        allowed in flags, which should nudge people toward good algos. We
-        always OR all hashes together, even ones from URLs.
-
-        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
-            downloaded from the internet, as by populate_link()
-
-        """
-        good_hashes = self.options.get('hashes', {}).copy()
-        link = self.link if trust_internet else self.original_link
-        if link and link.hash:
-            good_hashes.setdefault(link.hash_name, []).append(link.hash)
-        return Hashes(good_hashes)
-
-    def from_path(self):
-        # type: () -> Optional[str]
-        """Format a nice indicator to show where this "comes from"
-        """
-        if self.req is None:
-            return None
-        s = str(self.req)
-        if self.comes_from:
-            if isinstance(self.comes_from, six.string_types):
-                comes_from = self.comes_from
-            else:
-                comes_from = self.comes_from.from_path()
-            if comes_from:
-                s += '->' + comes_from
-        return s
-
-    def ensure_build_location(self, build_dir):
-        # type: (str) -> str
-        assert build_dir is not None
-        if self._temp_build_dir is not None:
-            assert self._temp_build_dir.path
-            return self._temp_build_dir.path
-        if self.req is None:
-            # for a requirement given as a path to a directory: the name of
-            # the package is not available yet, so we create a temp directory.
-            # Once run_egg_info has run, we'll be able to fix it via
-            # move_to_correct_build_directory().
-            # Some systems have /tmp as a symlink which confuses custom
-            # builds (such as numpy). Thus, we ensure that the real path
-            # is returned.
-            self._temp_build_dir = TempDirectory(kind="req-build")
-            self._ideal_build_dir = build_dir
-
-            return self._temp_build_dir.path
-        if self.editable:
-            name = self.name.lower()
-        else:
-            name = self.name
-        # FIXME: Is there a better place to create the build_dir? (hg and bzr
-        # need this)
-        if not os.path.exists(build_dir):
-            logger.debug('Creating directory %s', build_dir)
-            _make_build_dir(build_dir)
-        return os.path.join(build_dir, name)
-
-    def move_to_correct_build_directory(self):
-        # type: () -> None
-        """Move self._temp_build_dir to "self._ideal_build_dir/self.req.name"
-
-        For some requirements (e.g. a path to a directory), the name of the
-        package is not available until we run egg_info, so the build_location
-        will return a temporary directory and store the _ideal_build_dir.
-
-        This is only called to "fix" the build directory after generating
-        metadata.
-        """
-        if self.source_dir is not None:
-            return
-        assert self.req is not None
-        assert self._temp_build_dir
-        assert (
-            self._ideal_build_dir is not None and
-            self._ideal_build_dir.path  # type: ignore
-        )
-        old_location = self._temp_build_dir
-        self._temp_build_dir = None  # checked inside ensure_build_location
-
-        # Figure out the correct place to put the files.
-        new_location = self.ensure_build_location(self._ideal_build_dir)
-        if os.path.exists(new_location):
-            raise InstallationError(
-                'A package already exists in %s; please remove it to continue'
-                % display_path(new_location)
-            )
-
-        # Move the files to the correct location.
-        logger.debug(
-            'Moving package %s from %s to new location %s',
-            self, display_path(old_location.path), display_path(new_location),
-        )
-        shutil.move(old_location.path, new_location)
-
-        # Update directory-tracking variables, to be in line with new_location
-        self.source_dir = os.path.normpath(os.path.abspath(new_location))
-        self._temp_build_dir = TempDirectory(
-            path=new_location, kind="req-install",
-        )
-
-        # Correct the metadata directory, if it exists
-        if self.metadata_directory:
-            old_meta = self.metadata_directory
-            rel = os.path.relpath(old_meta, start=old_location.path)
-            new_meta = os.path.join(new_location, rel)
-            new_meta = os.path.normpath(os.path.abspath(new_meta))
-            self.metadata_directory = new_meta
-
-        # Done with any "move built files" work, since we have moved files to
-        # the "ideal" build location. Setting this to None clearly flags that
-        # no more moves are needed.
-        self._ideal_build_dir = None
-
-    def remove_temporary_source(self):
-        # type: () -> None
-        """Remove the source files from this requirement, if they are marked
-        for deletion"""
-        if self.source_dir and has_delete_marker_file(self.source_dir):
-            logger.debug('Removing source in %s', self.source_dir)
-            rmtree(self.source_dir)
-        self.source_dir = None
-        if self._temp_build_dir:
-            self._temp_build_dir.cleanup()
-            self._temp_build_dir = None
-        self.build_env.cleanup()
-
-    def check_if_exists(self, use_user_site):
-        # type: (bool) -> bool
-        """Find an installed distribution that satisfies or conflicts
-        with this requirement, and set self.satisfied_by or
-        self.conflicts_with appropriately.
-        """
-        if self.req is None:
-            return False
-        try:
-            # get_distribution() will resolve the entire list of requirements
-            # anyway, and we've already determined that we need the requirement
-            # in question, so strip the marker so that we don't try to
-            # evaluate it.
-            no_marker = Requirement(str(self.req))
-            no_marker.marker = None
-            self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
-            if self.editable and self.satisfied_by:
-                self.conflicts_with = self.satisfied_by
-                # when installing editables, nothing pre-existing should ever
-                # satisfy
-                self.satisfied_by = None
-                return True
-        except pkg_resources.DistributionNotFound:
-            return False
-        except pkg_resources.VersionConflict:
-            existing_dist = pkg_resources.get_distribution(
-                self.req.name
-            )
-            if use_user_site:
-                if dist_in_usersite(existing_dist):
-                    self.conflicts_with = existing_dist
-                elif (running_under_virtualenv() and
-                        dist_in_site_packages(existing_dist)):
-                    raise InstallationError(
-                        "Will not install to the user site because it will "
-                        "lack sys.path precedence to %s in %s" %
-                        (existing_dist.project_name, existing_dist.location)
-                    )
-            else:
-                self.conflicts_with = existing_dist
-        return True
-
-    # Things valid for wheels
-    @property
-    def is_wheel(self):
-        # type: () -> bool
-        if not self.link:
-            return False
-        return self.link.is_wheel
-
-    def move_wheel_files(
-        self,
-        wheeldir,  # type: str
-        root=None,  # type: Optional[str]
-        home=None,  # type: Optional[str]
-        prefix=None,  # type: Optional[str]
-        warn_script_location=True,  # type: bool
-        use_user_site=False,  # type: bool
-        pycompile=True  # type: bool
-    ):
-        # type: (...) -> None
-        wheel.move_wheel_files(
-            self.name, self.req, wheeldir,
-            user=use_user_site,
-            home=home,
-            root=root,
-            prefix=prefix,
-            pycompile=pycompile,
-            isolated=self.isolated,
-            warn_script_location=warn_script_location,
-        )
-
-    # Things valid for sdists
-    @property
-    def unpacked_source_directory(self):
-        # type: () -> str
-        return os.path.join(
-            self.source_dir,
-            self.link and self.link.subdirectory_fragment or '')
-
-    @property
-    def setup_py_path(self):
-        # type: () -> str
-        assert self.source_dir, "No source dir for %s" % self
-        setup_py = os.path.join(self.unpacked_source_directory, 'setup.py')
-
-        # Python2 __file__ should not be unicode
-        if six.PY2 and isinstance(setup_py, six.text_type):
-            setup_py = setup_py.encode(sys.getfilesystemencoding())
-
-        return setup_py
-
-    @property
-    def pyproject_toml_path(self):
-        # type: () -> str
-        assert self.source_dir, "No source dir for %s" % self
-        return make_pyproject_path(self.unpacked_source_directory)
-
-    def load_pyproject_toml(self):
-        # type: () -> None
-        """Load the pyproject.toml file.
-
-        After calling this routine, all of the attributes related to PEP 517
-        processing for this requirement have been set. In particular, the
-        use_pep517 attribute can be used to determine whether we should
-        follow the PEP 517 or legacy (setup.py) code path.
-        """
-        pyproject_toml_data = load_pyproject_toml(
-            self.use_pep517,
-            self.pyproject_toml_path,
-            self.setup_py_path,
-            str(self)
-        )
-
-        if pyproject_toml_data is None:
-            self.use_pep517 = False
-            return
-
-        self.use_pep517 = True
-        requires, backend, check = pyproject_toml_data
-        self.requirements_to_check = check
-        self.pyproject_requires = requires
-        self.pep517_backend = Pep517HookCaller(
-            self.unpacked_source_directory, backend
-        )
-
-    def prepare_metadata(self):
-        # type: () -> None
-        """Ensure that project metadata is available.
-
-        Under PEP 517, call the backend hook to prepare the metadata.
-        Under legacy processing, call setup.py egg-info.
-        """
-        assert self.source_dir
-
-        metadata_generator = get_metadata_generator(self)
-        with indent_log():
-            self.metadata_directory = metadata_generator(self)
-
-        if not self.req:
-            if isinstance(parse_version(self.metadata["Version"]), Version):
-                op = "=="
-            else:
-                op = "==="
-            self.req = Requirement(
-                "".join([
-                    self.metadata["Name"],
-                    op,
-                    self.metadata["Version"],
-                ])
-            )
-            self.move_to_correct_build_directory()
-        else:
-            metadata_name = canonicalize_name(self.metadata["Name"])
-            if canonicalize_name(self.req.name) != metadata_name:
-                logger.warning(
-                    'Generating metadata for package %s '
-                    'produced metadata for project name %s. Fix your '
-                    '#egg=%s fragments.',
-                    self.name, metadata_name, self.name
-                )
-                self.req = Requirement(metadata_name)
-
-    def prepare_pep517_metadata(self):
-        # type: () -> str
-        assert self.pep517_backend is not None
-
-        # NOTE: This needs to be refactored to stop using atexit
-        metadata_tmpdir = TempDirectory(kind="modern-metadata")
-        atexit.register(metadata_tmpdir.cleanup)
-
-        metadata_dir = metadata_tmpdir.path
-
-        with self.build_env:
-            # Note that Pep517HookCaller implements a fallback for
-            # prepare_metadata_for_build_wheel, so we don't have to
-            # consider the possibility that this hook doesn't exist.
-            runner = runner_with_spinner_message("Preparing wheel metadata")
-            backend = self.pep517_backend
-            with backend.subprocess_runner(runner):
-                distinfo_dir = backend.prepare_metadata_for_build_wheel(
-                    metadata_dir
-                )
-
-        return os.path.join(metadata_dir, distinfo_dir)
-
-    @property
-    def metadata(self):
-        # type: () -> Any
-        if not hasattr(self, '_metadata'):
-            self._metadata = get_metadata(self.get_dist())
-
-        return self._metadata
-
-    def get_dist(self):
-        # type: () -> Distribution
-        """Return a pkg_resources.Distribution for this requirement"""
-        dist_dir = self.metadata_directory.rstrip(os.sep)
-
-        # Determine the correct Distribution object type.
-        if dist_dir.endswith(".egg-info"):
-            dist_cls = pkg_resources.Distribution
-        else:
-            assert dist_dir.endswith(".dist-info")
-            dist_cls = pkg_resources.DistInfoDistribution
-
-        # Build a PathMetadata object, from path to metadata. :wink:
-        base_dir, dist_dir_name = os.path.split(dist_dir)
-        dist_name = os.path.splitext(dist_dir_name)[0]
-        metadata = pkg_resources.PathMetadata(base_dir, dist_dir)
-
-        return dist_cls(
-            base_dir,
-            project_name=dist_name,
-            metadata=metadata,
-        )
-
-    def assert_source_matches_version(self):
-        # type: () -> None
-        assert self.source_dir
-        version = self.metadata['version']
-        if self.req.specifier and version not in self.req.specifier:
-            logger.warning(
-                'Requested %s, but installing version %s',
-                self,
-                version,
-            )
-        else:
-            logger.debug(
-                'Source in %s has version %s, which satisfies requirement %s',
-                display_path(self.source_dir),
-                version,
-                self,
-            )
-
-    # For both source distributions and editables
-    def ensure_has_source_dir(self, parent_dir):
-        # type: (str) -> None
-        """Ensure that a source_dir is set.
-
-        This will create a temporary build dir if the name of the requirement
-        isn't known yet.
-
-        :param parent_dir: The ideal pip parent_dir for the source_dir.
-            Generally src_dir for editables and build_dir for sdists.
-        :return: self.source_dir
-        """
-        if self.source_dir is None:
-            self.source_dir = self.ensure_build_location(parent_dir)
-
-    # For editable installations
-    def install_editable(
-        self,
-        install_options,  # type: List[str]
-        global_options=(),  # type: Sequence[str]
-        prefix=None  # type: Optional[str]
-    ):
-        # type: (...) -> None
-        logger.info('Running setup.py develop for %s', self.name)
-
-        if prefix:
-            prefix_param = ['--prefix={}'.format(prefix)]
-            install_options = list(install_options) + prefix_param
-        base_cmd = make_setuptools_shim_args(
-            self.setup_py_path,
-            global_options=global_options,
-            no_user_config=self.isolated
-        )
-        with indent_log():
-            with self.build_env:
-                call_subprocess(
-                    base_cmd +
-                    ['develop', '--no-deps'] +
-                    list(install_options),
-                    cwd=self.unpacked_source_directory,
-                )
-
-        self.install_succeeded = True
-
-    def update_editable(self, obtain=True):
-        # type: (bool) -> None
-        if not self.link:
-            logger.debug(
-                "Cannot update repository at %s; repository location is "
-                "unknown",
-                self.source_dir,
-            )
-            return
-        assert self.editable
-        assert self.source_dir
-        if self.link.scheme == 'file':
-            # Static paths don't get updated
-            return
-        assert '+' in self.link.url, "bad url: %r" % self.link.url
-        vc_type, url = self.link.url.split('+', 1)
-        vcs_backend = vcs.get_backend(vc_type)
-        if vcs_backend:
-            hidden_url = hide_url(self.link.url)
-            if obtain:
-                vcs_backend.obtain(self.source_dir, url=hidden_url)
-            else:
-                vcs_backend.export(self.source_dir, url=hidden_url)
-        else:
-            assert 0, (
-                'Unexpected version control type (in %s): %s'
-                % (self.link, vc_type))
-
-    # Top-level Actions
-    def uninstall(self, auto_confirm=False, verbose=False,
-                  use_user_site=False):
-        # type: (bool, bool, bool) -> Optional[UninstallPathSet]
-        """
-        Uninstall the distribution currently satisfying this requirement.
-
-        Prompts before removing or modifying files unless
-        ``auto_confirm`` is True.
-
-        Refuses to delete or modify files outside of ``sys.prefix`` -
-        thus uninstallation within a virtual environment can only
-        modify that virtual environment, even if the virtualenv is
-        linked to global site-packages.
-
-        """
-        if not self.check_if_exists(use_user_site):
-            logger.warning("Skipping %s as it is not installed.", self.name)
-            return None
-        dist = self.satisfied_by or self.conflicts_with
-
-        uninstalled_pathset = UninstallPathSet.from_dist(dist)
-        uninstalled_pathset.remove(auto_confirm, verbose)
-        return uninstalled_pathset
-
-    def _clean_zip_name(self, name, prefix):  # only used by archive.
-        # type: (str, str) -> str
-        assert name.startswith(prefix + os.path.sep), (
-            "name %r doesn't start with prefix %r" % (name, prefix)
-        )
-        name = name[len(prefix) + 1:]
-        name = name.replace(os.path.sep, '/')
-        return name
-
-    def _get_archive_name(self, path, parentdir, rootdir):
-        # type: (str, str, str) -> str
-        path = os.path.join(parentdir, path)
-        name = self._clean_zip_name(path, rootdir)
-        return self.name + '/' + name
-
-    def archive(self, build_dir):
-        # type: (str) -> None
-        """Saves archive to provided build_dir.
-
-        Used for saving downloaded VCS requirements as part of `pip download`.
-        """
-        assert self.source_dir
-
-        create_archive = True
-        archive_name = '%s-%s.zip' % (self.name, self.metadata["version"])
-        archive_path = os.path.join(build_dir, archive_name)
-
-        if os.path.exists(archive_path):
-            response = ask_path_exists(
-                'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' %
-                display_path(archive_path), ('i', 'w', 'b', 'a'))
-            if response == 'i':
-                create_archive = False
-            elif response == 'w':
-                logger.warning('Deleting %s', display_path(archive_path))
-                os.remove(archive_path)
-            elif response == 'b':
-                dest_file = backup_dir(archive_path)
-                logger.warning(
-                    'Backing up %s to %s',
-                    display_path(archive_path),
-                    display_path(dest_file),
-                )
-                shutil.move(archive_path, dest_file)
-            elif response == 'a':
-                sys.exit(-1)
-
-        if not create_archive:
-            return
-
-        zip_output = zipfile.ZipFile(
-            archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True,
-        )
-        with zip_output:
-            dir = os.path.normcase(
-                os.path.abspath(self.unpacked_source_directory)
-            )
-            for dirpath, dirnames, filenames in os.walk(dir):
-                if 'pip-egg-info' in dirnames:
-                    dirnames.remove('pip-egg-info')
-                for dirname in dirnames:
-                    dir_arcname = self._get_archive_name(
-                        dirname, parentdir=dirpath, rootdir=dir,
-                    )
-                    zipdir = zipfile.ZipInfo(dir_arcname + '/')
-                    zipdir.external_attr = 0x1ED << 16  # 0o755
-                    zip_output.writestr(zipdir, '')
-                for filename in filenames:
-                    if filename == PIP_DELETE_MARKER_FILENAME:
-                        continue
-                    file_arcname = self._get_archive_name(
-                        filename, parentdir=dirpath, rootdir=dir,
-                    )
-                    filename = os.path.join(dirpath, filename)
-                    zip_output.write(filename, file_arcname)
-
-        logger.info('Saved %s', display_path(archive_path))
-
-    def install(
-        self,
-        install_options,  # type: List[str]
-        global_options=None,  # type: Optional[Sequence[str]]
-        root=None,  # type: Optional[str]
-        home=None,  # type: Optional[str]
-        prefix=None,  # type: Optional[str]
-        warn_script_location=True,  # type: bool
-        use_user_site=False,  # type: bool
-        pycompile=True  # type: bool
-    ):
-        # type: (...) -> None
-        global_options = global_options if global_options is not None else []
-        if self.editable:
-            self.install_editable(
-                install_options, global_options, prefix=prefix,
-            )
-            return
-        if self.is_wheel:
-            version = wheel.wheel_version(self.source_dir)
-            wheel.check_compatibility(version, self.name)
-
-            self.move_wheel_files(
-                self.source_dir, root=root, prefix=prefix, home=home,
-                warn_script_location=warn_script_location,
-                use_user_site=use_user_site, pycompile=pycompile,
-            )
-            self.install_succeeded = True
-            return
-
-        # Extend the list of global and install options passed on to
-        # the setup.py call with the ones from the requirements file.
-        # Options specified in requirements file override those
-        # specified on the command line, since the last option given
-        # to setup.py is the one that is used.
-        global_options = list(global_options) + \
-            self.options.get('global_options', [])
-        install_options = list(install_options) + \
-            self.options.get('install_options', [])
-
-        with TempDirectory(kind="record") as temp_dir:
-            record_filename = os.path.join(temp_dir.path, 'install-record.txt')
-            install_args = self.get_install_args(
-                global_options, record_filename, root, prefix, pycompile,
-            )
-
-            runner = runner_with_spinner_message(
-                "Running setup.py install for {}".format(self.name)
-            )
-            with indent_log(), self.build_env:
-                runner(
-                    cmd=install_args + install_options,
-                    cwd=self.unpacked_source_directory,
-                )
-
-            if not os.path.exists(record_filename):
-                logger.debug('Record file %s not found', record_filename)
-                return
-            self.install_succeeded = True
-
-            def prepend_root(path):
-                # type: (str) -> str
-                if root is None or not os.path.isabs(path):
-                    return path
-                else:
-                    return change_root(root, path)
-
-            with open(record_filename) as f:
-                for line in f:
-                    directory = os.path.dirname(line)
-                    if directory.endswith('.egg-info'):
-                        egg_info_dir = prepend_root(directory)
-                        break
-                else:
-                    logger.warning(
-                        'Could not find .egg-info directory in install record'
-                        ' for %s',
-                        self,
-                    )
-                    # FIXME: put the record somewhere
-                    return
-            new_lines = []
-            with open(record_filename) as f:
-                for line in f:
-                    filename = line.strip()
-                    if os.path.isdir(filename):
-                        filename += os.path.sep
-                    new_lines.append(
-                        os.path.relpath(prepend_root(filename), egg_info_dir)
-                    )
-            new_lines.sort()
-            ensure_dir(egg_info_dir)
-            inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
-            with open(inst_files_path, 'w') as f:
-                f.write('\n'.join(new_lines) + '\n')
-
-    def get_install_args(
-        self,
-        global_options,  # type: Sequence[str]
-        record_filename,  # type: str
-        root,  # type: Optional[str]
-        prefix,  # type: Optional[str]
-        pycompile  # type: bool
-    ):
-        # type: (...) -> List[str]
-        install_args = make_setuptools_shim_args(
-            self.setup_py_path,
-            global_options=global_options,
-            no_user_config=self.isolated,
-            unbuffered_output=True
-        )
-        install_args += ['install', '--record', record_filename]
-        install_args += ['--single-version-externally-managed']
-
-        if root is not None:
-            install_args += ['--root', root]
-        if prefix is not None:
-            install_args += ['--prefix', prefix]
-
-        if pycompile:
-            install_args += ["--compile"]
-        else:
-            install_args += ["--no-compile"]
-
-        if running_under_virtualenv():
-            py_ver_str = 'python' + sysconfig.get_python_version()
-            install_args += ['--install-headers',
-                             os.path.join(sys.prefix, 'include', 'site',
-                                          py_ver_str, self.name)]
-
-        return install_args
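
InstallRequirement.is_pinned above counts a requirement as pinned only when its specifier consists of a single '==' or '===' clause. The same test can be written directly against packaging.requirements.Requirement, as in this small sketch; it is independent of pip's class and purely illustrative.

from packaging.requirements import Requirement


def is_pinned(req):
    # True when the requirement fixes an exact version, e.g. pkg==1.2 or pkg===1.2.
    specifiers = req.specifier
    return (len(specifiers) == 1 and
            next(iter(specifiers)).operator in {'==', '==='})


if __name__ == '__main__':
    for text in ('some-package==1.2', 'some-package>1.2', 'some-package~=1.2'):
        print(text, is_pinned(Requirement(text)))
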
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/req_set.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/req_set.py
deleted file mode 100644
index b34a2bb..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/req_set.py
+++ /dev/null
@@ -1,210 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-
-from __future__ import absolute_import
-
-import logging
-from collections import OrderedDict
-
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal import pep425tags
-from pip._internal.exceptions import InstallationError
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.wheel import Wheel
-
-if MYPY_CHECK_RUNNING:
-    from typing import Dict, Iterable, List, Optional, Tuple
-    from pip._internal.req.req_install import InstallRequirement
-
-
-logger = logging.getLogger(__name__)
-
-
-class RequirementSet(object):
-
-    def __init__(self, require_hashes=False, check_supported_wheels=True):
-        # type: (bool, bool) -> None
-        """Create a RequirementSet.
-        """
-
-        self.requirements = OrderedDict()  # type: Dict[str, InstallRequirement]  # noqa: E501
-        self.require_hashes = require_hashes
-        self.check_supported_wheels = check_supported_wheels
-
-        self.unnamed_requirements = []  # type: List[InstallRequirement]
-        self.successfully_downloaded = []  # type: List[InstallRequirement]
-        self.reqs_to_cleanup = []  # type: List[InstallRequirement]
-
-    def __str__(self):
-        # type: () -> str
-        requirements = sorted(
-            (req for req in self.requirements.values() if not req.comes_from),
-            key=lambda req: canonicalize_name(req.name),
-        )
-        return ' '.join(str(req.req) for req in requirements)
-
-    def __repr__(self):
-        # type: () -> str
-        requirements = sorted(
-            self.requirements.values(),
-            key=lambda req: canonicalize_name(req.name),
-        )
-
-        format_string = '<{classname} object; {count} requirement(s): {reqs}>'
-        return format_string.format(
-            classname=self.__class__.__name__,
-            count=len(requirements),
-            reqs=', '.join(str(req.req) for req in requirements),
-        )
-
-    def add_unnamed_requirement(self, install_req):
-        # type: (InstallRequirement) -> None
-        assert not install_req.name
-        self.unnamed_requirements.append(install_req)
-
-    def add_named_requirement(self, install_req):
-        # type: (InstallRequirement) -> None
-        assert install_req.name
-
-        project_name = canonicalize_name(install_req.name)
-        self.requirements[project_name] = install_req
-
-    def add_requirement(
-        self,
-        install_req,  # type: InstallRequirement
-        parent_req_name=None,  # type: Optional[str]
-        extras_requested=None  # type: Optional[Iterable[str]]
-    ):
-        # type: (...) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]  # noqa: E501
-        """Add install_req as a requirement to install.
-
-        :param parent_req_name: The name of the requirement that needed this
-            added. The name is used because when multiple unnamed requirements
-            resolve to the same name, we could otherwise end up with dependency
-            links that point outside the Requirements set. parent_req must
-            already be added. Note that None implies that this is a user
-            supplied requirement, vs an inferred one.
-        :param extras_requested: an iterable of extras used to evaluate the
-            environment markers.
-        :return: Additional requirements to scan. That is either [] if
-            the requirement is not applicable, or [install_req] if the
-            requirement is applicable and has just been added.
-        """
-        # If the markers do not match, ignore this requirement.
-        if not install_req.match_markers(extras_requested):
-            logger.info(
-                "Ignoring %s: markers '%s' don't match your environment",
-                install_req.name, install_req.markers,
-            )
-            return [], None
-
-        # If the wheel is not supported, raise an error.
-        # Should check this after filtering out based on environment markers to
-        # allow specifying different wheels based on the environment/OS, in a
-        # single requirements file.
-        if install_req.link and install_req.link.is_wheel:
-            wheel = Wheel(install_req.link.filename)
-            tags = pep425tags.get_supported()
-            if (self.check_supported_wheels and not wheel.supported(tags)):
-                raise InstallationError(
-                    "%s is not a supported wheel on this platform." %
-                    wheel.filename
-                )
-
-        # This next bit is really a sanity check.
-        assert install_req.is_direct == (parent_req_name is None), (
-            "a direct req shouldn't have a parent and also, "
-            "a non direct req should have a parent"
-        )
-
-        # Unnamed requirements are scanned again and the requirement won't be
-        # added as a dependency until after scanning.
-        if not install_req.name:
-            self.add_unnamed_requirement(install_req)
-            return [install_req], None
-
-        try:
-            existing_req = self.get_requirement(install_req.name)
-        except KeyError:
-            existing_req = None
-
-        has_conflicting_requirement = (
-            parent_req_name is None and
-            existing_req and
-            not existing_req.constraint and
-            existing_req.extras == install_req.extras and
-            existing_req.req.specifier != install_req.req.specifier
-        )
-        if has_conflicting_requirement:
-            raise InstallationError(
-                "Double requirement given: %s (already in %s, name=%r)"
-                % (install_req, existing_req, install_req.name)
-            )
-
-        # When no existing requirement exists, add the requirement as a
-        # dependency and it will be scanned again after.
-        if not existing_req:
-            self.add_named_requirement(install_req)
-            # We'd want to rescan this requirement later
-            return [install_req], install_req
-
-        # Assume there's no need to scan, and that we've already
-        # encountered this for scanning.
-        if install_req.constraint or not existing_req.constraint:
-            return [], existing_req
-
-        does_not_satisfy_constraint = (
-            install_req.link and
-            not (
-                existing_req.link and
-                install_req.link.path == existing_req.link.path
-            )
-        )
-        if does_not_satisfy_constraint:
-            self.reqs_to_cleanup.append(install_req)
-            raise InstallationError(
-                "Could not satisfy constraints for '%s': "
-                "installation from path or url cannot be "
-                "constrained to a version" % install_req.name,
-            )
-        # If we're now installing a constraint, mark the existing
-        # object for real installation.
-        existing_req.constraint = False
-        existing_req.extras = tuple(sorted(
-            set(existing_req.extras) | set(install_req.extras)
-        ))
-        logger.debug(
-            "Setting %s extras to: %s",
-            existing_req, existing_req.extras,
-        )
-        # Return the existing requirement for addition to the parent and
-        # scanning again.
-        return [existing_req], existing_req
-
-    def has_requirement(self, name):
-        # type: (str) -> bool
-        project_name = canonicalize_name(name)
-
-        return (
-            project_name in self.requirements and
-            not self.requirements[project_name].constraint
-        )
-
-    def get_requirement(self, name):
-        # type: (str) -> InstallRequirement
-        project_name = canonicalize_name(name)
-
-        if project_name in self.requirements:
-            return self.requirements[project_name]
-
-        raise KeyError("No project with the name %r" % name)
-
-    def cleanup_files(self):
-        # type: () -> None
-        """Clean up files, remove builds."""
-        logger.debug('Cleaning up...')
-        with indent_log():
-            for req in self.reqs_to_cleanup:
-                req.remove_temporary_source()
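The deduplication above hinges on canonicalize_name(): add_named_requirement() and get_requirement() both key the OrderedDict on the canonicalized project name, so differently spelled references to one project collapse to a single entry. A minimal stdlib-only sketch of that rule (illustrative only, not pip's API; the regex mirrors what packaging.utils.canonicalize_name does):

import re
from collections import OrderedDict

def canonicalize(name):
    # Lower-case and collapse runs of '-', '_' and '.' into a single '-',
    # the same normalization packaging.utils.canonicalize_name applies.
    return re.sub(r"[-_.]+", "-", name).lower()

requirements = OrderedDict()
requirements[canonicalize("Foo_Bar")] = ">=1.0"
requirements[canonicalize("foo.bar")] = ">=2.0"   # same key, replaces the entry

print(requirements)   # OrderedDict([('foo-bar', '>=2.0')])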
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/req_tracker.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/req_tracker.py
deleted file mode 100644
index aa57c79..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/req_tracker.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-
-from __future__ import absolute_import
-
-import contextlib
-import errno
-import hashlib
-import logging
-import os
-
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from types import TracebackType
-    from typing import Iterator, Optional, Set, Type
-    from pip._internal.req.req_install import InstallRequirement
-    from pip._internal.models.link import Link
-
-logger = logging.getLogger(__name__)
-
-
-class RequirementTracker(object):
-
-    def __init__(self):
-        # type: () -> None
-        self._root = os.environ.get('PIP_REQ_TRACKER')
-        if self._root is None:
-            self._temp_dir = TempDirectory(delete=False, kind='req-tracker')
-            self._root = os.environ['PIP_REQ_TRACKER'] = self._temp_dir.path
-            logger.debug('Created requirements tracker %r', self._root)
-        else:
-            self._temp_dir = None
-            logger.debug('Re-using requirements tracker %r', self._root)
-        self._entries = set()  # type: Set[InstallRequirement]
-
-    def __enter__(self):
-        # type: () -> RequirementTracker
-        return self
-
-    def __exit__(
-        self,
-        exc_type,  # type: Optional[Type[BaseException]]
-        exc_val,  # type: Optional[BaseException]
-        exc_tb  # type: Optional[TracebackType]
-    ):
-        # type: (...) -> None
-        self.cleanup()
-
-    def _entry_path(self, link):
-        # type: (Link) -> str
-        hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
-        return os.path.join(self._root, hashed)
-
-    def add(self, req):
-        # type: (InstallRequirement) -> None
-        link = req.link
-        info = str(req)
-        entry_path = self._entry_path(link)
-        try:
-            with open(entry_path) as fp:
-                # Error, there's already a build in progress.
-                raise LookupError('%s is already being built: %s'
-                                  % (link, fp.read()))
-        except IOError as e:
-            if e.errno != errno.ENOENT:
-                raise
-            assert req not in self._entries
-            with open(entry_path, 'w') as fp:
-                fp.write(info)
-            self._entries.add(req)
-            logger.debug('Added %s to build tracker %r', req, self._root)
-
-    def remove(self, req):
-        # type: (InstallRequirement) -> None
-        link = req.link
-        self._entries.remove(req)
-        os.unlink(self._entry_path(link))
-        logger.debug('Removed %s from build tracker %r', req, self._root)
-
-    def cleanup(self):
-        # type: () -> None
-        for req in set(self._entries):
-            self.remove(req)
-        remove = self._temp_dir is not None
-        if remove:
-            self._temp_dir.cleanup()
-        logger.debug('%s build tracker %r',
-                     'Removed' if remove else 'Cleaned',
-                     self._root)
-
-    @contextlib.contextmanager
-    def track(self, req):
-        # type: (InstallRequirement) -> Iterator[None]
-        self.add(req)
-        yield
-        self.remove(req)
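RequirementTracker above is essentially a directory of marker files: each link being built gets a file named after the SHA-224 of its URL, adding the same link twice raises, and the marker is removed when the build ends. A self-contained sketch of that idea (stdlib only, not pip's API; unlike track() above it also cleans up on error via finally):

import contextlib
import hashlib
import os
import tempfile

TRACKER_ROOT = tempfile.mkdtemp(prefix="req-tracker-")

def entry_path(url):
    return os.path.join(TRACKER_ROOT, hashlib.sha224(url.encode()).hexdigest())

@contextlib.contextmanager
def track(url):
    path = entry_path(url)
    if os.path.exists(path):
        raise LookupError("%s is already being built" % url)
    with open(path, "w") as fp:
        fp.write(url)
    try:
        yield
    finally:
        os.unlink(path)   # marker removed once the build finishes

with track("https://example.com/pkg-1.0.tar.gz"):
    pass  # build the requirement here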
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/req_uninstall.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/req_uninstall.py
deleted file mode 100644
index 3acde91..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/req/req_uninstall.py
+++ /dev/null
@@ -1,644 +0,0 @@
-from __future__ import absolute_import
-
-import csv
-import functools
-import logging
-import os
-import sys
-import sysconfig
-
-from pip._vendor import pkg_resources
-
-from pip._internal.exceptions import UninstallationError
-from pip._internal.locations import bin_py, bin_user
-from pip._internal.utils.compat import WINDOWS, cache_from_source, uses_pycache
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import (
-    FakeFile,
-    ask,
-    dist_in_usersite,
-    dist_is_local,
-    egg_link_path,
-    is_local,
-    normalize_path,
-    renames,
-    rmtree,
-)
-from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import (
-        Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple,
-    )
-    from pip._vendor.pkg_resources import Distribution
-
-logger = logging.getLogger(__name__)
-
-
-def _script_names(dist, script_name, is_gui):
-    # type: (Distribution, str, bool) -> List[str]
-    """Create the fully qualified name of the files created by
-    {console,gui}_scripts for the given ``dist``.
-    Returns the list of file names
-    """
-    if dist_in_usersite(dist):
-        bin_dir = bin_user
-    else:
-        bin_dir = bin_py
-    exe_name = os.path.join(bin_dir, script_name)
-    paths_to_remove = [exe_name]
-    if WINDOWS:
-        paths_to_remove.append(exe_name + '.exe')
-        paths_to_remove.append(exe_name + '.exe.manifest')
-        if is_gui:
-            paths_to_remove.append(exe_name + '-script.pyw')
-        else:
-            paths_to_remove.append(exe_name + '-script.py')
-    return paths_to_remove
-
-
-def _unique(fn):
-    # type: (Callable) -> Callable[..., Iterator[Any]]
-    @functools.wraps(fn)
-    def unique(*args, **kw):
-        # type: (Any, Any) -> Iterator[Any]
-        seen = set()  # type: Set[Any]
-        for item in fn(*args, **kw):
-            if item not in seen:
-                seen.add(item)
-                yield item
-    return unique
-
-
-@_unique
-def uninstallation_paths(dist):
-    # type: (Distribution) -> Iterator[str]
-    """
-    Yield all the uninstallation paths for dist based on RECORD-without-.py[co]
-
-    Yield paths to all the files in RECORD. For each .py file in RECORD, add
-    the .pyc and .pyo in the same directory.
-
-    UninstallPathSet.add() takes care of the __pycache__ .py[co].
-    """
-    r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
-    for row in r:
-        path = os.path.join(dist.location, row[0])
-        yield path
-        if path.endswith('.py'):
-            dn, fn = os.path.split(path)
-            base = fn[:-3]
-            path = os.path.join(dn, base + '.pyc')
-            yield path
-            path = os.path.join(dn, base + '.pyo')
-            yield path
-
-
-def compact(paths):
-    # type: (Iterable[str]) -> Set[str]
-    """Compact a path set to contain the minimal number of paths
-    necessary to contain all paths in the set. If /a/path/ and
-    /a/path/to/a/file.txt are both in the set, leave only the
-    shorter path."""
-
-    sep = os.path.sep
-    short_paths = set()  # type: Set[str]
-    for path in sorted(paths, key=len):
-        should_skip = any(
-            path.startswith(shortpath.rstrip("*")) and
-            path[len(shortpath.rstrip("*").rstrip(sep))] == sep
-            for shortpath in short_paths
-        )
-        if not should_skip:
-            short_paths.add(path)
-    return short_paths
-
-
-def compress_for_rename(paths):
-    # type: (Iterable[str]) -> Set[str]
-    """Returns a set containing the paths that need to be renamed.
-
-    This set may include directories when the original sequence of paths
-    included every file on disk.
-    """
-    case_map = dict((os.path.normcase(p), p) for p in paths)
-    remaining = set(case_map)
-    unchecked = sorted(set(os.path.split(p)[0]
-                           for p in case_map.values()), key=len)
-    wildcards = set()  # type: Set[str]
-
-    def norm_join(*a):
-        # type: (str) -> str
-        return os.path.normcase(os.path.join(*a))
-
-    for root in unchecked:
-        if any(os.path.normcase(root).startswith(w)
-               for w in wildcards):
-            # This directory has already been handled.
-            continue
-
-        all_files = set()  # type: Set[str]
-        all_subdirs = set()  # type: Set[str]
-        for dirname, subdirs, files in os.walk(root):
-            all_subdirs.update(norm_join(root, dirname, d)
-                               for d in subdirs)
-            all_files.update(norm_join(root, dirname, f)
-                             for f in files)
-        # If all the files we found are in our remaining set of files to
-        # remove, then remove them from the latter set and add a wildcard
-        # for the directory.
-        if not (all_files - remaining):
-            remaining.difference_update(all_files)
-            wildcards.add(root + os.sep)
-
-    return set(map(case_map.__getitem__, remaining)) | wildcards
-
-
-def compress_for_output_listing(paths):
-    # type: (Iterable[str]) -> Tuple[Set[str], Set[str]]
-    """Returns a tuple of 2 sets of which paths to display to user
-
-    The first set contains paths that would be deleted. Files of a package
-    are not added and the top-level directory of the package has a '*' added
-    at the end - to signify that all it's contents are removed.
-
-    The second set contains files that would have been skipped in the above
-    folders.
-    """
-
-    will_remove = set(paths)
-    will_skip = set()
-
-    # Determine folders and files
-    folders = set()
-    files = set()
-    for path in will_remove:
-        if path.endswith(".pyc"):
-            continue
-        if path.endswith("__init__.py") or ".dist-info" in path:
-            folders.add(os.path.dirname(path))
-        files.add(path)
-
-    # probably this one https://github.com/python/mypy/issues/390
-    _normcased_files = set(map(os.path.normcase, files))  # type: ignore
-
-    folders = compact(folders)
-
-    # This walks the tree using os.walk to not miss extra folders
-    # that might get added.
-    for folder in folders:
-        for dirpath, _, dirfiles in os.walk(folder):
-            for fname in dirfiles:
-                if fname.endswith(".pyc"):
-                    continue
-
-                file_ = os.path.join(dirpath, fname)
-                if (os.path.isfile(file_) and
-                        os.path.normcase(file_) not in _normcased_files):
-                    # We are skipping this file. Add it to the set.
-                    will_skip.add(file_)
-
-    will_remove = files | {
-        os.path.join(folder, "*") for folder in folders
-    }
-
-    return will_remove, will_skip
-
-
-class StashedUninstallPathSet(object):
-    """A set of file rename operations to stash files while
-    tentatively uninstalling them."""
-    def __init__(self):
-        # type: () -> None
-        # Mapping from source file root to [Adjacent]TempDirectory
-        # for files under that directory.
-        self._save_dirs = {}  # type: Dict[str, TempDirectory]
-        # (old path, new path) tuples for each move that may need
-        # to be undone.
-        self._moves = []  # type: List[Tuple[str, str]]
-
-    def _get_directory_stash(self, path):
-        # type: (str) -> str
-        """Stashes a directory.
-
-        Directories are stashed adjacent to their original location if
-        possible, or else moved/copied into the user's temp dir."""
-
-        try:
-            save_dir = AdjacentTempDirectory(path)  # type: TempDirectory
-        except OSError:
-            save_dir = TempDirectory(kind="uninstall")
-        self._save_dirs[os.path.normcase(path)] = save_dir
-
-        return save_dir.path
-
-    def _get_file_stash(self, path):
-        # type: (str) -> str
-        """Stashes a file.
-
-        If no root has been provided, one will be created for the directory
-        in the user's temp directory."""
-        path = os.path.normcase(path)
-        head, old_head = os.path.dirname(path), None
-        save_dir = None
-
-        while head != old_head:
-            try:
-                save_dir = self._save_dirs[head]
-                break
-            except KeyError:
-                pass
-            head, old_head = os.path.dirname(head), head
-        else:
-            # Did not find any suitable root
-            head = os.path.dirname(path)
-            save_dir = TempDirectory(kind='uninstall')
-            self._save_dirs[head] = save_dir
-
-        relpath = os.path.relpath(path, head)
-        if relpath and relpath != os.path.curdir:
-            return os.path.join(save_dir.path, relpath)
-        return save_dir.path
-
-    def stash(self, path):
-        # type: (str) -> str
-        """Stashes the directory or file and returns its new location.
-        Handle symlinks as files to avoid modifying the symlink targets.
-        """
-        path_is_dir = os.path.isdir(path) and not os.path.islink(path)
-        if path_is_dir:
-            new_path = self._get_directory_stash(path)
-        else:
-            new_path = self._get_file_stash(path)
-
-        self._moves.append((path, new_path))
-        if (path_is_dir and os.path.isdir(new_path)):
-            # If we're moving a directory, we need to
-            # remove the destination first or else it will be
-            # moved to inside the existing directory.
-            # We just created new_path ourselves, so it will
-            # be removable.
-            os.rmdir(new_path)
-        renames(path, new_path)
-        return new_path
-
-    def commit(self):
-        # type: () -> None
-        """Commits the uninstall by removing stashed files."""
-        for _, save_dir in self._save_dirs.items():
-            save_dir.cleanup()
-        self._moves = []
-        self._save_dirs = {}
-
-    def rollback(self):
-        # type: () -> None
-        """Undoes the uninstall by moving stashed files back."""
-        for p in self._moves:
-            logging.info("Moving to %s\n from %s", *p)
-
-        for new_path, path in self._moves:
-            try:
-                logger.debug('Replacing %s from %s', new_path, path)
-                if os.path.isfile(new_path) or os.path.islink(new_path):
-                    os.unlink(new_path)
-                elif os.path.isdir(new_path):
-                    rmtree(new_path)
-                renames(path, new_path)
-            except OSError as ex:
-                logger.error("Failed to restore %s", new_path)
-                logger.debug("Exception: %s", ex)
-
-        self.commit()
-
-    @property
-    def can_rollback(self):
-        # type: () -> bool
-        return bool(self._moves)
-
-
-class UninstallPathSet(object):
-    """A set of file paths to be removed in the uninstallation of a
-    requirement."""
-    def __init__(self, dist):
-        # type: (Distribution) -> None
-        self.paths = set()  # type: Set[str]
-        self._refuse = set()  # type: Set[str]
-        self.pth = {}  # type: Dict[str, UninstallPthEntries]
-        self.dist = dist
-        self._moved_paths = StashedUninstallPathSet()
-
-    def _permitted(self, path):
-        # type: (str) -> bool
-        """
-        Return True if the given path is one we are permitted to
-        remove/modify, False otherwise.
-
-        """
-        return is_local(path)
-
-    def add(self, path):
-        # type: (str) -> None
-        head, tail = os.path.split(path)
-
-        # we normalize the head to resolve parent directory symlinks, but not
-        # the tail, since we only want to uninstall symlinks, not their targets
-        path = os.path.join(normalize_path(head), os.path.normcase(tail))
-
-        if not os.path.exists(path):
-            return
-        if self._permitted(path):
-            self.paths.add(path)
-        else:
-            self._refuse.add(path)
-
-        # __pycache__ files can show up after 'installed-files.txt' is created,
-        # due to imports
-        if os.path.splitext(path)[1] == '.py' and uses_pycache:
-            self.add(cache_from_source(path))
-
-    def add_pth(self, pth_file, entry):
-        # type: (str, str) -> None
-        pth_file = normalize_path(pth_file)
-        if self._permitted(pth_file):
-            if pth_file not in self.pth:
-                self.pth[pth_file] = UninstallPthEntries(pth_file)
-            self.pth[pth_file].add(entry)
-        else:
-            self._refuse.add(pth_file)
-
-    def remove(self, auto_confirm=False, verbose=False):
-        # type: (bool, bool) -> None
-        """Remove paths in ``self.paths`` with confirmation (unless
-        ``auto_confirm`` is True)."""
-
-        if not self.paths:
-            logger.info(
-                "Can't uninstall '%s'. No files were found to uninstall.",
-                self.dist.project_name,
-            )
-            return
-
-        dist_name_version = (
-            self.dist.project_name + "-" + self.dist.version
-        )
-        logger.info('Uninstalling %s:', dist_name_version)
-
-        with indent_log():
-            if auto_confirm or self._allowed_to_proceed(verbose):
-                moved = self._moved_paths
-
-                for_rename = compress_for_rename(self.paths)
-
-                for path in sorted(compact(for_rename)):
-                    moved.stash(path)
-                    logger.debug('Removing file or directory %s', path)
-
-                for pth in self.pth.values():
-                    pth.remove()
-
-                logger.info('Successfully uninstalled %s', dist_name_version)
-
-    def _allowed_to_proceed(self, verbose):
-        # type: (bool) -> bool
-        """Display which files would be deleted and prompt for confirmation
-        """
-
-        def _display(msg, paths):
-            # type: (str, Iterable[str]) -> None
-            if not paths:
-                return
-
-            logger.info(msg)
-            with indent_log():
-                for path in sorted(compact(paths)):
-                    logger.info(path)
-
-        if not verbose:
-            will_remove, will_skip = compress_for_output_listing(self.paths)
-        else:
-            # In verbose mode, display all the files that are going to be
-            # deleted.
-            will_remove = set(self.paths)
-            will_skip = set()
-
-        _display('Would remove:', will_remove)
-        _display('Would not remove (might be manually added):', will_skip)
-        _display('Would not remove (outside of prefix):', self._refuse)
-        if verbose:
-            _display('Will actually move:', compress_for_rename(self.paths))
-
-        return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'
-
-    def rollback(self):
-        # type: () -> None
-        """Rollback the changes previously made by remove()."""
-        if not self._moved_paths.can_rollback:
-            logger.error(
-                "Can't roll back %s; was not uninstalled",
-                self.dist.project_name,
-            )
-            return
-        logger.info('Rolling back uninstall of %s', self.dist.project_name)
-        self._moved_paths.rollback()
-        for pth in self.pth.values():
-            pth.rollback()
-
-    def commit(self):
-        # type: () -> None
-        """Remove temporary save dir: rollback will no longer be possible."""
-        self._moved_paths.commit()
-
-    @classmethod
-    def from_dist(cls, dist):
-        # type: (Distribution) -> UninstallPathSet
-        dist_path = normalize_path(dist.location)
-        if not dist_is_local(dist):
-            logger.info(
-                "Not uninstalling %s at %s, outside environment %s",
-                dist.key,
-                dist_path,
-                sys.prefix,
-            )
-            return cls(dist)
-
-        if dist_path in {p for p in {sysconfig.get_path("stdlib"),
-                                     sysconfig.get_path("platstdlib")}
-                         if p}:
-            logger.info(
-                "Not uninstalling %s at %s, as it is in the standard library.",
-                dist.key,
-                dist_path,
-            )
-            return cls(dist)
-
-        paths_to_remove = cls(dist)
-        develop_egg_link = egg_link_path(dist)
-        develop_egg_link_egg_info = '{}.egg-info'.format(
-            pkg_resources.to_filename(dist.project_name))
-        egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
-        # Special case for distutils installed package
-        distutils_egg_info = getattr(dist._provider, 'path', None)
-
-        # The order of the uninstall cases matters: with two installs of the
-        # same package, pip needs to uninstall the currently detected version.
-        if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
-                not dist.egg_info.endswith(develop_egg_link_egg_info)):
-            # if dist.egg_info.endswith(develop_egg_link_egg_info), we
-            # are in fact in the develop_egg_link case
-            paths_to_remove.add(dist.egg_info)
-            if dist.has_metadata('installed-files.txt'):
-                for installed_file in dist.get_metadata(
-                        'installed-files.txt').splitlines():
-                    path = os.path.normpath(
-                        os.path.join(dist.egg_info, installed_file)
-                    )
-                    paths_to_remove.add(path)
-            # FIXME: need a test for this elif block
-            # occurs with --single-version-externally-managed/--record outside
-            # of pip
-            elif dist.has_metadata('top_level.txt'):
-                if dist.has_metadata('namespace_packages.txt'):
-                    namespaces = dist.get_metadata('namespace_packages.txt')
-                else:
-                    namespaces = []
-                for top_level_pkg in [
-                        p for p
-                        in dist.get_metadata('top_level.txt').splitlines()
-                        if p and p not in namespaces]:
-                    path = os.path.join(dist.location, top_level_pkg)
-                    paths_to_remove.add(path)
-                    paths_to_remove.add(path + '.py')
-                    paths_to_remove.add(path + '.pyc')
-                    paths_to_remove.add(path + '.pyo')
-
-        elif distutils_egg_info:
-            raise UninstallationError(
-                "Cannot uninstall {!r}. It is a distutils installed project "
-                "and thus we cannot accurately determine which files belong "
-                "to it which would lead to only a partial uninstall.".format(
-                    dist.project_name,
-                )
-            )
-
-        elif dist.location.endswith('.egg'):
-            # package installed by easy_install
-            # We cannot match on dist.egg_name because it can slightly vary
-            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
-            paths_to_remove.add(dist.location)
-            easy_install_egg = os.path.split(dist.location)[1]
-            easy_install_pth = os.path.join(os.path.dirname(dist.location),
-                                            'easy-install.pth')
-            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
-
-        elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
-            for path in uninstallation_paths(dist):
-                paths_to_remove.add(path)
-
-        elif develop_egg_link:
-            # develop egg
-            with open(develop_egg_link, 'r') as fh:
-                link_pointer = os.path.normcase(fh.readline().strip())
-            assert (link_pointer == dist.location), (
-                'Egg-link %s does not match installed location of %s '
-                '(at %s)' % (link_pointer, dist.project_name, dist.location)
-            )
-            paths_to_remove.add(develop_egg_link)
-            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
-                                            'easy-install.pth')
-            paths_to_remove.add_pth(easy_install_pth, dist.location)
-
-        else:
-            logger.debug(
-                'Not sure how to uninstall: %s - Check: %s',
-                dist, dist.location,
-            )
-
-        # find distutils scripts= scripts
-        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
-            for script in dist.metadata_listdir('scripts'):
-                if dist_in_usersite(dist):
-                    bin_dir = bin_user
-                else:
-                    bin_dir = bin_py
-                paths_to_remove.add(os.path.join(bin_dir, script))
-                if WINDOWS:
-                    paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')
-
-        # find console_scripts
-        _scripts_to_remove = []
-        console_scripts = dist.get_entry_map(group='console_scripts')
-        for name in console_scripts.keys():
-            _scripts_to_remove.extend(_script_names(dist, name, False))
-        # find gui_scripts
-        gui_scripts = dist.get_entry_map(group='gui_scripts')
-        for name in gui_scripts.keys():
-            _scripts_to_remove.extend(_script_names(dist, name, True))
-
-        for s in _scripts_to_remove:
-            paths_to_remove.add(s)
-
-        return paths_to_remove
-
-
-class UninstallPthEntries(object):
-    def __init__(self, pth_file):
-        # type: (str) -> None
-        if not os.path.isfile(pth_file):
-            raise UninstallationError(
-                "Cannot remove entries from nonexistent file %s" % pth_file
-            )
-        self.file = pth_file
-        self.entries = set()  # type: Set[str]
-        self._saved_lines = None  # type: Optional[List[bytes]]
-
-    def add(self, entry):
-        # type: (str) -> None
-        entry = os.path.normcase(entry)
-        # On Windows, os.path.normcase converts the entry to use
-        # backslashes.  This is correct for entries that describe absolute
-        # paths outside of site-packages, but all the others use forward
-        # slashes.
-        # os.path.splitdrive is used instead of os.path.isabs because isabs
-        # treats non-absolute paths with drive letter markings like c:foo\bar
-        # as absolute paths. It also does not recognize UNC paths if they don't
-        # have more than "\\server\share". Valid examples: "\\server\share\" or
-        # "\\server\share\folder". Python 2.7.8+ supports UNC in splitdrive.
-        if WINDOWS and not os.path.splitdrive(entry)[0]:
-            entry = entry.replace('\\', '/')
-        self.entries.add(entry)
-
-    def remove(self):
-        # type: () -> None
-        logger.debug('Removing pth entries from %s:', self.file)
-        with open(self.file, 'rb') as fh:
-            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
-            lines = fh.readlines()
-            self._saved_lines = lines
-        if any(b'\r\n' in line for line in lines):
-            endline = '\r\n'
-        else:
-            endline = '\n'
-        # handle missing trailing newline
-        if lines and not lines[-1].endswith(endline.encode("utf-8")):
-            lines[-1] = lines[-1] + endline.encode("utf-8")
-        for entry in self.entries:
-            try:
-                logger.debug('Removing entry: %s', entry)
-                lines.remove((entry + endline).encode("utf-8"))
-            except ValueError:
-                pass
-        with open(self.file, 'wb') as fh:
-            fh.writelines(lines)
-
-    def rollback(self):
-        # type: () -> bool
-        if self._saved_lines is None:
-            logger.error(
-                'Cannot roll back changes to %s, none were made', self.file
-            )
-            return False
-        logger.debug('Rolling %s back to previous state', self.file)
-        with open(self.file, 'wb') as fh:
-            fh.writelines(self._saved_lines)
-        return True
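compact() above boils down to one rule: keep a path only if no shorter path already in the set covers it as a parent directory; compress_for_output_listing() reuses it to collapse the folder listing. A stdlib-only sketch of that rule (illustrative; the '*' wildcard handling of the real function is omitted):

import os

def compact(paths):
    sep = os.path.sep
    short_paths = set()
    for path in sorted(paths, key=len):
        # Skip this path if a shorter, already-kept path is its parent.
        covered = any(
            path.startswith(short) and path[len(short.rstrip(sep))] == sep
            for short in short_paths
        )
        if not covered:
            short_paths.add(path)
    return short_paths

print(compact({"/a/path/", "/a/path/to/a/file.txt", "/b/other.txt"}))
# {'/a/path/', '/b/other.txt'}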
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/self_outdated_check.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/self_outdated_check.py
deleted file mode 100644
index 51ef343..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/self_outdated_check.py
+++ /dev/null
@@ -1,244 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import datetime
-import hashlib
-import json
-import logging
-import os.path
-import sys
-
-from pip._vendor import pkg_resources
-from pip._vendor.packaging import version as packaging_version
-from pip._vendor.six import ensure_binary
-
-from pip._internal.collector import LinkCollector
-from pip._internal.index import PackageFinder
-from pip._internal.models.search_scope import SearchScope
-from pip._internal.models.selection_prefs import SelectionPreferences
-from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.filesystem import (
-    adjacent_tmp_file,
-    check_path_owner,
-    replace,
-)
-from pip._internal.utils.misc import (
-    ensure_dir,
-    get_installed_version,
-    redact_auth_from_url,
-)
-from pip._internal.utils.packaging import get_installer
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    import optparse
-    from optparse import Values
-    from typing import Any, Dict, Text, Union
-
-    from pip._internal.network.session import PipSession
-
-
-SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
-
-
-logger = logging.getLogger(__name__)
-
-
-def make_link_collector(
-    session,  # type: PipSession
-    options,  # type: Values
-    suppress_no_index=False,  # type: bool
-):
-    # type: (...) -> LinkCollector
-    """
-    :param session: The Session to use to make requests.
-    :param suppress_no_index: Whether to ignore the --no-index option
-        when constructing the SearchScope object.
-    """
-    index_urls = [options.index_url] + options.extra_index_urls
-    if options.no_index and not suppress_no_index:
-        logger.debug(
-            'Ignoring indexes: %s',
-            ','.join(redact_auth_from_url(url) for url in index_urls),
-        )
-        index_urls = []
-
-    # Make sure find_links is a list before passing to create().
-    find_links = options.find_links or []
-
-    search_scope = SearchScope.create(
-        find_links=find_links, index_urls=index_urls,
-    )
-
-    link_collector = LinkCollector(session=session, search_scope=search_scope)
-
-    return link_collector
-
-
-def _get_statefile_name(key):
-    # type: (Union[str, Text]) -> str
-    key_bytes = ensure_binary(key)
-    name = hashlib.sha224(key_bytes).hexdigest()
-    return name
-
-
-class SelfCheckState(object):
-    def __init__(self, cache_dir):
-        # type: (str) -> None
-        self.state = {}  # type: Dict[str, Any]
-        self.statefile_path = None
-
-        # Try to load the existing state
-        if cache_dir:
-            self.statefile_path = os.path.join(
-                cache_dir, "selfcheck", _get_statefile_name(self.key)
-            )
-            try:
-                with open(self.statefile_path) as statefile:
-                    self.state = json.load(statefile)
-            except (IOError, ValueError, KeyError):
-                # Explicitly suppressing exceptions, since we don't want to
-                # error out if the cache file is invalid.
-                pass
-
-    @property
-    def key(self):
-        return sys.prefix
-
-    def save(self, pypi_version, current_time):
-        # type: (str, datetime.datetime) -> None
-        # If we do not have a path to cache in, don't bother saving.
-        if not self.statefile_path:
-            return
-
-        # Check to make sure that we own the directory
-        if not check_path_owner(os.path.dirname(self.statefile_path)):
-            return
-
-        # Now that we've ensured the directory is owned by this user, we'll go
-        # ahead and make sure that all our directories are created.
-        ensure_dir(os.path.dirname(self.statefile_path))
-
-        state = {
-            # Include the key so it's easy to tell which pip wrote the
-            # file.
-            "key": self.key,
-            "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
-            "pypi_version": pypi_version,
-        }
-
-        text = json.dumps(state, sort_keys=True, separators=(",", ":"))
-
-        with adjacent_tmp_file(self.statefile_path) as f:
-            f.write(ensure_binary(text))
-
-        try:
-            # Since we have a prefix-specific state file, we can just
-            # overwrite whatever is there, no need to check.
-            replace(f.name, self.statefile_path)
-        except OSError:
-            # Best effort.
-            pass
-
-
-def was_installed_by_pip(pkg):
-    # type: (str) -> bool
-    """Checks whether pkg was installed by pip
-
-    This is used not to display the upgrade message when pip is in fact
-    installed by system package manager, such as dnf on Fedora.
-    """
-    try:
-        dist = pkg_resources.get_distribution(pkg)
-        return "pip" == get_installer(dist)
-    except pkg_resources.DistributionNotFound:
-        return False
-
-
-def pip_self_version_check(session, options):
-    # type: (PipSession, optparse.Values) -> None
-    """Check for an update for pip.
-
-    Limit the frequency of checks to once per week. State is stored either in
-    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
-    of the pip script path.
-    """
-    installed_version = get_installed_version("pip")
-    if not installed_version:
-        return
-
-    pip_version = packaging_version.parse(installed_version)
-    pypi_version = None
-
-    try:
-        state = SelfCheckState(cache_dir=options.cache_dir)
-
-        current_time = datetime.datetime.utcnow()
-        # Determine if we need to refresh the state
-        if "last_check" in state.state and "pypi_version" in state.state:
-            last_check = datetime.datetime.strptime(
-                state.state["last_check"],
-                SELFCHECK_DATE_FMT
-            )
-            if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
-                pypi_version = state.state["pypi_version"]
-
-        # Refresh the version if we need to or just see if we need to warn
-        if pypi_version is None:
-            # Let's use PackageFinder to see what the latest pip version is
-            link_collector = make_link_collector(
-                session,
-                options=options,
-                suppress_no_index=True,
-            )
-
-            # Pass allow_yanked=False so we don't suggest upgrading to a
-            # yanked version.
-            selection_prefs = SelectionPreferences(
-                allow_yanked=False,
-                allow_all_prereleases=False,  # Explicitly set to False
-            )
-
-            finder = PackageFinder.create(
-                link_collector=link_collector,
-                selection_prefs=selection_prefs,
-            )
-            best_candidate = finder.find_best_candidate("pip").best_candidate
-            if best_candidate is None:
-                return
-            pypi_version = str(best_candidate.version)
-
-            # save that we've performed a check
-            state.save(pypi_version, current_time)
-
-        remote_version = packaging_version.parse(pypi_version)
-
-        local_version_is_older = (
-            pip_version < remote_version and
-            pip_version.base_version != remote_version.base_version and
-            was_installed_by_pip('pip')
-        )
-
-        # Nothing to do unless the local version is older
-        if not local_version_is_older:
-            return
-
-        # Advise "python -m pip" on Windows to avoid issues
-        # with overwriting pip.exe.
-        if WINDOWS:
-            pip_cmd = "python -m pip"
-        else:
-            pip_cmd = "pip"
-        logger.warning(
-            "You are using pip version %s; however, version %s is "
-            "available.\nYou should consider upgrading via the "
-            "'%s install --upgrade pip' command.",
-            pip_version, pypi_version, pip_cmd
-        )
-    except Exception:
-        logger.debug(
-            "There was an error checking the latest version of pip",
-            exc_info=True,
-        )
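The throttling in pip_self_version_check() above boils down to a timestamp comparison: the cached PyPI version is reused while the stored last_check is less than seven days old, otherwise the index is queried again. A stdlib-only sketch with made-up cache values:

import datetime

SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
state = {"last_check": "2019-11-01T12:00:00Z", "pypi_version": "19.3.1"}  # hypothetical cached state

last_check = datetime.datetime.strptime(state["last_check"], SELFCHECK_DATE_FMT)
age = (datetime.datetime.utcnow() - last_check).total_seconds()

if age < 7 * 24 * 60 * 60:
    pypi_version = state["pypi_version"]   # fresh enough: reuse the cached value
else:
    pypi_version = None                    # stale: ask the package index again

print(pypi_version)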
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/appdirs.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/appdirs.py
deleted file mode 100644
index 06cd831..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/appdirs.py
+++ /dev/null
@@ -1,276 +0,0 @@
-"""
-This code was taken from https://github.com/ActiveState/appdirs and modified
-to suit our purposes.
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import os
-import sys
-
-from pip._vendor.six import PY2, text_type
-
-from pip._internal.utils.compat import WINDOWS, expanduser
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import List
-
-
-def user_cache_dir(appname):
-    # type: (str) -> str
-    r"""
-    Return full path to the user-specific cache dir for this application.
-
-        "appname" is the name of application.
-
-    Typical user cache directories are:
-        macOS:      ~/Library/Caches/<AppName>
-        Unix:       ~/.cache/<AppName> (XDG default)
-        Windows:    C:\Users\<username>\AppData\Local\<AppName>\Cache
-
-    On Windows the only suggestion in the MSDN docs is that local settings go
-    in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the
-    non-roaming app data dir (the default returned by `user_data_dir`). Apps
-    typically put cache data somewhere *under* the given dir here. Some
-    examples:
-        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
-        ...\Acme\SuperApp\Cache\1.0
-
-    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
-    """
-    if WINDOWS:
-        # Get the base path
-        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
-
-        # When using Python 2, return paths as bytes on Windows like we do on
-        # other operating systems. See helper function docs for more details.
-        if PY2 and isinstance(path, text_type):
-            path = _win_path_to_bytes(path)
-
-        # Add our app name and Cache directory to it
-        path = os.path.join(path, appname, "Cache")
-    elif sys.platform == "darwin":
-        # Get the base path
-        path = expanduser("~/Library/Caches")
-
-        # Add our app name to it
-        path = os.path.join(path, appname)
-    else:
-        # Get the base path
-        path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache"))
-
-        # Add our app name to it
-        path = os.path.join(path, appname)
-
-    return path
-
-
-def user_data_dir(appname, roaming=False):
-    # type: (str, bool) -> str
-    r"""
-    Return full path to the user-specific data dir for this application.
-
-        "appname" is the name of application.
-            If None, just the system directory is returned.
-        "roaming" (boolean, default False) can be set True to use the Windows
-            roaming appdata directory. That means that for users on a Windows
-            network setup for roaming profiles, this user data will be
-            sync'd on login. See
-            
-            for a discussion of issues.
-
-    Typical user data directories are:
-        macOS:                  ~/Library/Application Support/<AppName>
-                                if it exists, else ~/.config/<AppName>
-        Unix:                   ~/.local/share/<AppName>    # or in
-                                $XDG_DATA_HOME, if defined
-        Win XP (not roaming):   C:\Documents and Settings\<username>\ ...
-                                ...Application Data\<AppName>
-        Win XP (roaming):       C:\Documents and Settings\<username>\Local ...
-                                ...Settings\Application Data\<AppName>
-        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppName>
-        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppName>
-
-    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
-    That means, by default "~/.local/share/".
-    """
-    if WINDOWS:
-        const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
-        path = os.path.join(os.path.normpath(_get_win_folder(const)), appname)
-    elif sys.platform == "darwin":
-        path = os.path.join(
-            expanduser('~/Library/Application Support/'),
-            appname,
-        ) if os.path.isdir(os.path.join(
-            expanduser('~/Library/Application Support/'),
-            appname,
-        )
-        ) else os.path.join(
-            expanduser('~/.config/'),
-            appname,
-        )
-    else:
-        path = os.path.join(
-            os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")),
-            appname,
-        )
-
-    return path
-
-
-def user_config_dir(appname, roaming=True):
-    # type: (str, bool) -> str
-    """Return full path to the user-specific config dir for this application.
-
-        "appname" is the name of application.
-            If None, just the system directory is returned.
-        "roaming" (boolean, default True) can be set False to not use the
-            Windows roaming appdata directory. That means that for users on a
-            Windows network setup for roaming profiles, this user data will be
-            sync'd on login. See
-            
-            for a discussion of issues.
-
-    Typical user data directories are:
-        macOS:                  same as user_data_dir
-        Unix:                   ~/.config/<AppName>
-        Win *:                  same as user_data_dir
-
-    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
-    That means, by default "~/.config/".
-    """
-    if WINDOWS:
-        path = user_data_dir(appname, roaming=roaming)
-    elif sys.platform == "darwin":
-        path = user_data_dir(appname)
-    else:
-        path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config"))
-        path = os.path.join(path, appname)
-
-    return path
-
-
-# for the discussion regarding site_config_dirs locations
-# see 
-def site_config_dirs(appname):
-    # type: (str) -> List[str]
-    r"""Return a list of potential user-shared config dirs for this application.
-
-        "appname" is the name of application.
-
-    Typical user config directories are:
-        macOS:      /Library/Application Support/<AppName>/
-        Unix:       /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in
-                    $XDG_CONFIG_DIRS
-        Win XP:     C:\Documents and Settings\All Users\Application ...
-                    ...Data\<AppName>\
-        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory
-                    on Vista.)
-        Win 7:      Hidden, but writeable on Win 7:
-                    C:\ProgramData\<AppName>\
-    """
-    if WINDOWS:
-        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
-        pathlist = [os.path.join(path, appname)]
-    elif sys.platform == 'darwin':
-        pathlist = [os.path.join('/Library/Application Support', appname)]
-    else:
-        # try looking in $XDG_CONFIG_DIRS
-        xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
-        if xdg_config_dirs:
-            pathlist = [
-                os.path.join(expanduser(x), appname)
-                for x in xdg_config_dirs.split(os.pathsep)
-            ]
-        else:
-            pathlist = []
-
-        # always look in /etc directly as well
-        pathlist.append('/etc')
-
-    return pathlist
-
-
-# -- Windows support functions --
-
-def _get_win_folder_from_registry(csidl_name):
-    # type: (str) -> str
-    """
-    This is a fallback technique at best. I'm not sure if using the
-    registry for this guarantees us the correct answer for all CSIDL_*
-    names.
-    """
-    import _winreg
-
-    shell_folder_name = {
-        "CSIDL_APPDATA": "AppData",
-        "CSIDL_COMMON_APPDATA": "Common AppData",
-        "CSIDL_LOCAL_APPDATA": "Local AppData",
-    }[csidl_name]
-
-    key = _winreg.OpenKey(
-        _winreg.HKEY_CURRENT_USER,
-        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
-    )
-    directory, _type = _winreg.QueryValueEx(key, shell_folder_name)
-    return directory
-
-
-def _get_win_folder_with_ctypes(csidl_name):
-    # type: (str) -> str
-    # On Python 2, ctypes.create_unicode_buffer().value returns "unicode",
-    # which isn't the same as str in the annotation above.
-    csidl_const = {
-        "CSIDL_APPDATA": 26,
-        "CSIDL_COMMON_APPDATA": 35,
-        "CSIDL_LOCAL_APPDATA": 28,
-    }[csidl_name]
-
-    buf = ctypes.create_unicode_buffer(1024)
-    windll = ctypes.windll  # type: ignore
-    windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
-
-    # Downgrade to short path name if it has high-bit chars. See
-    # .
-    has_high_char = False
-    for c in buf:
-        if ord(c) > 255:
-            has_high_char = True
-            break
-    if has_high_char:
-        buf2 = ctypes.create_unicode_buffer(1024)
-        if windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
-            buf = buf2
-
-    # The type: ignore is explained under the type annotation for this function
-    return buf.value  # type: ignore
-
-
-if WINDOWS:
-    try:
-        import ctypes
-        _get_win_folder = _get_win_folder_with_ctypes
-    except ImportError:
-        _get_win_folder = _get_win_folder_from_registry
-
-
-def _win_path_to_bytes(path):
-    """Encode Windows paths to bytes. Only used on Python 2.
-
-    Motivation is to be consistent with other operating systems where paths
-    are also returned as bytes. This avoids problems mixing bytes and Unicode
-    elsewhere in the codebase. For more details and discussion see
-    .
-
-    If encoding using ASCII and MBCS fails, return the original Unicode path.
-    """
-    for encoding in ('ASCII', 'MBCS'):
-        try:
-            return path.encode(encoding)
-        except (UnicodeEncodeError, LookupError):
-            pass
-    return path
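On non-Windows platforms, user_cache_dir() above reduces to a couple of environment lookups. A stdlib-only sketch of those branches, resolved for an app named "pip" (illustrative, not a drop-in replacement for the function):

import os
import sys

appname = "pip"
if sys.platform == "darwin":
    # macOS: ~/Library/Caches/<AppName>
    path = os.path.join(os.path.expanduser("~/Library/Caches"), appname)
else:
    # Unix: $XDG_CACHE_HOME/<AppName>, falling back to ~/.cache/<AppName>
    base = os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache"))
    path = os.path.join(base, appname)

print(path)   # e.g. /home/user/.cache/pip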
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/compat.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/compat.py
deleted file mode 100644
index dbd8448..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/compat.py
+++ /dev/null
@@ -1,297 +0,0 @@
-"""Stuff that differs in different Python versions and platform
-distributions."""
-
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import, division
-
-import codecs
-import locale
-import logging
-import os
-import shutil
-import sys
-
-from pip._vendor.six import PY2, text_type
-from pip._vendor.urllib3.util import IS_PYOPENSSL
-
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import Optional, Text, Tuple, Union
-
-try:
-    import _ssl  # noqa
-except ImportError:
-    ssl = None
-else:
-    # This additional assignment was needed to prevent a mypy error.
-    ssl = _ssl
-
-try:
-    import ipaddress
-except ImportError:
-    try:
-        from pip._vendor import ipaddress  # type: ignore
-    except ImportError:
-        import ipaddr as ipaddress  # type: ignore
-        ipaddress.ip_address = ipaddress.IPAddress  # type: ignore
-        ipaddress.ip_network = ipaddress.IPNetwork  # type: ignore
-
-
-__all__ = [
-    "ipaddress", "uses_pycache", "console_to_str", "native_str",
-    "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size",
-    "get_extension_suffixes",
-]
-
-
-logger = logging.getLogger(__name__)
-
-HAS_TLS = (ssl is not None) or IS_PYOPENSSL
-
-if PY2:
-    import imp
-
-    try:
-        cache_from_source = imp.cache_from_source  # type: ignore
-    except AttributeError:
-        # does not use __pycache__
-        cache_from_source = None
-
-    uses_pycache = cache_from_source is not None
-else:
-    uses_pycache = True
-    from importlib.util import cache_from_source
-
-
-if PY2:
-    # In Python 2.7, backslashreplace exists
-    # but does not support use for decoding.
-    # We implement our own replace handler for this
-    # situation, so that we can consistently use
-    # backslash replacement for all versions.
-    def backslashreplace_decode_fn(err):
-        raw_bytes = (err.object[i] for i in range(err.start, err.end))
-        # Python 2 gave us characters - convert to numeric bytes
-        raw_bytes = (ord(b) for b in raw_bytes)
-        return u"".join(u"\\x%x" % c for c in raw_bytes), err.end
-    codecs.register_error(
-        "backslashreplace_decode",
-        backslashreplace_decode_fn,
-    )
-    backslashreplace_decode = "backslashreplace_decode"
-else:
-    backslashreplace_decode = "backslashreplace"
-
-
-def str_to_display(data, desc=None):
-    # type: (Union[bytes, Text], Optional[str]) -> Text
-    """
-    For display or logging purposes, convert a bytes object (or text) to
-    text (e.g. unicode in Python 2) safe for output.
-
-    :param desc: An optional phrase describing the input data, for use in
-        the log message if a warning is logged. Defaults to "Bytes object".
-
-    This function should never error out and so can take a best effort
-    approach. It is okay to be lossy if needed since the return value is
-    just for display.
-
-    We assume the data is in the locale preferred encoding. If it won't
-    decode properly, we warn the user but decode as best we can.
-
-    We also ensure that the output can be safely written to standard output
-    without encoding errors.
-    """
-    if isinstance(data, text_type):
-        return data
-
-    # Otherwise, data is a bytes object (str in Python 2).
-    # First, get the encoding we assume. This is the preferred
-    # encoding for the locale, unless that is not found, or
-    # it is ASCII, in which case assume UTF-8
-    encoding = locale.getpreferredencoding()
-    if (not encoding) or codecs.lookup(encoding).name == "ascii":
-        encoding = "utf-8"
-
-    # Now try to decode the data - if we fail, warn the user and
-    # decode with replacement.
-    try:
-        decoded_data = data.decode(encoding)
-    except UnicodeDecodeError:
-        if desc is None:
-            desc = 'Bytes object'
-        msg_format = '{} does not appear to be encoded as %s'.format(desc)
-        logger.warning(msg_format, encoding)
-        decoded_data = data.decode(encoding, errors=backslashreplace_decode)
-
-    # Make sure we can print the output, by encoding it to the output
-    # encoding with replacement of unencodable characters, and then
-    # decoding again.
-    # We use stderr's encoding because it's less likely to be
-    # redirected and if we don't find an encoding we skip this
-    # step (on the assumption that output is wrapped by something
-    # that won't fail).
-    # The double getattr is to deal with the possibility that we're
-    # being called in a situation where sys.__stderr__ doesn't exist,
-    # or doesn't have an encoding attribute. Neither of these cases
-    # should occur in normal pip use, but there's no harm in checking
-    # in case people use pip in (unsupported) unusual situations.
-    output_encoding = getattr(getattr(sys, "__stderr__", None),
-                              "encoding", None)
-
-    if output_encoding:
-        output_encoded = decoded_data.encode(
-            output_encoding,
-            errors="backslashreplace"
-        )
-        decoded_data = output_encoded.decode(output_encoding)
-
-    return decoded_data
-
-
-def console_to_str(data):
-    # type: (bytes) -> Text
-    """Return a string, safe for output, of subprocess output.
-    """
-    return str_to_display(data, desc='Subprocess output')
-
-
-if PY2:
-    def native_str(s, replace=False):
-        # type: (str, bool) -> str
-        # Replace is ignored -- unicode to UTF-8 can't fail
-        if isinstance(s, text_type):
-            return s.encode('utf-8')
-        return s
-
-else:
-    def native_str(s, replace=False):
-        # type: (str, bool) -> str
-        if isinstance(s, bytes):
-            return s.decode('utf-8', 'replace' if replace else 'strict')
-        return s
-
-
-def get_path_uid(path):
-    # type: (str) -> int
-    """
-    Return path's uid.
-
-    Does not follow symlinks:
-        https://github.com/pypa/pip/pull/935#discussion_r5307003
-
-    Placed this function in compat due to differences on AIX and
-    Jython, that should eventually go away.
-
-    :raises OSError: When path is a symlink or can't be read.
-    """
-    if hasattr(os, 'O_NOFOLLOW'):
-        fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
-        file_uid = os.fstat(fd).st_uid
-        os.close(fd)
-    else:  # AIX and Jython
-        # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
-        if not os.path.islink(path):
-            # older versions of Jython don't have `os.fstat`
-            file_uid = os.stat(path).st_uid
-        else:
-            # raise OSError for parity with os.O_NOFOLLOW above
-            raise OSError(
-                "%s is a symlink; Will not return uid for symlinks" % path
-            )
-    return file_uid
-
-
-if PY2:
-    from imp import get_suffixes
-
-    def get_extension_suffixes():
-        return [suffix[0] for suffix in get_suffixes()]
-
-else:
-    from importlib.machinery import EXTENSION_SUFFIXES
-
-    def get_extension_suffixes():
-        return EXTENSION_SUFFIXES
-
-
-def expanduser(path):
-    # type: (str) -> str
-    """
-    Expand ~ and ~user constructions.
-
-    Includes a workaround for https://bugs.python.org/issue14768
-    """
-    expanded = os.path.expanduser(path)
-    if path.startswith('~/') and expanded.startswith('//'):
-        expanded = expanded[1:]
-    return expanded
-
-
-# packages in the stdlib that may have installation metadata, but should not be
-# considered 'installed'.  this theoretically could be determined based on
-# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
-# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
-# make this ineffective, so hard-coding
-stdlib_pkgs = {"python", "wsgiref", "argparse"}
-
-
-# windows detection, covers cpython and ironpython
-WINDOWS = (sys.platform.startswith("win") or
-           (sys.platform == 'cli' and os.name == 'nt'))
-
-
-def samefile(file1, file2):
-    # type: (str, str) -> bool
-    """Provide an alternative for os.path.samefile on Windows/Python2"""
-    if hasattr(os.path, 'samefile'):
-        return os.path.samefile(file1, file2)
-    else:
-        path1 = os.path.normcase(os.path.abspath(file1))
-        path2 = os.path.normcase(os.path.abspath(file2))
-        return path1 == path2
-
-
-if hasattr(shutil, 'get_terminal_size'):
-    def get_terminal_size():
-        # type: () -> Tuple[int, int]
-        """
-        Returns a tuple (x, y) representing the width(x) and the height(y)
-        in characters of the terminal window.
-        """
-        return tuple(shutil.get_terminal_size())  # type: ignore
-else:
-    def get_terminal_size():
-        # type: () -> Tuple[int, int]
-        """
-        Returns a tuple (x, y) representing the width(x) and the height(y)
-        in characters of the terminal window.
-        """
-        def ioctl_GWINSZ(fd):
-            try:
-                import fcntl
-                import termios
-                import struct
-                cr = struct.unpack_from(
-                    'hh',
-                    fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678')
-                )
-            except Exception:
-                return None
-            if cr == (0, 0):
-                return None
-            return cr
-        cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
-        if not cr:
-            try:
-                fd = os.open(os.ctermid(), os.O_RDONLY)
-                cr = ioctl_GWINSZ(fd)
-                os.close(fd)
-            except Exception:
-                pass
-        if not cr:
-            cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
-        return int(cr[1]), int(cr[0])
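A minimal standalone sketch of the decode-for-display behaviour implemented by str_to_display()/console_to_str() in the file above, without importing pip internals; it mirrors only the Python 3 branch shown, and the sample bytes are made up:

    import codecs
    import locale

    def display(data):
        # Mirror str_to_display(): prefer the locale encoding, treat ASCII as UTF-8.
        encoding = locale.getpreferredencoding()
        if not encoding or codecs.lookup(encoding).name == "ascii":
            encoding = "utf-8"
        try:
            return data.decode(encoding)
        except UnicodeDecodeError:
            # Never fail: escape undecodable bytes instead (lossy but displayable).
            return data.decode(encoding, errors="backslashreplace")

    print(display(b"build ok"))        # -> build ok
    print(display(b"bad \xff byte"))   # -> bad \xff byte (escaped under a UTF-8 locale, no exception)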
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/deprecation.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/deprecation.py
deleted file mode 100644
index 2f20cfd..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/deprecation.py
+++ /dev/null
@@ -1,104 +0,0 @@
-"""
-A module that implements tooling to enable easy warnings about deprecations.
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import logging
-import warnings
-
-from pip._vendor.packaging.version import parse
-
-from pip import __version__ as current_version
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import Any, Optional
-
-
-DEPRECATION_MSG_PREFIX = "DEPRECATION: "
-
-
-class PipDeprecationWarning(Warning):
-    pass
-
-
-_original_showwarning = None  # type: Any
-
-
-# Warnings <-> Logging Integration
-def _showwarning(message, category, filename, lineno, file=None, line=None):
-    if file is not None:
-        if _original_showwarning is not None:
-            _original_showwarning(
-                message, category, filename, lineno, file, line,
-            )
-    elif issubclass(category, PipDeprecationWarning):
-        # We use a specially named logger which will handle all of the
-        # deprecation messages for pip.
-        logger = logging.getLogger("pip._internal.deprecations")
-        logger.warning(message)
-    else:
-        _original_showwarning(
-            message, category, filename, lineno, file, line,
-        )
-
-
-def install_warning_logger():
-    # type: () -> None
-    # Enable our Deprecation Warnings
-    warnings.simplefilter("default", PipDeprecationWarning, append=True)
-
-    global _original_showwarning
-
-    if _original_showwarning is None:
-        _original_showwarning = warnings.showwarning
-        warnings.showwarning = _showwarning
-
-
-def deprecated(reason, replacement, gone_in, issue=None):
-    # type: (str, Optional[str], Optional[str], Optional[int]) -> None
-    """Helper to deprecate existing functionality.
-
-    reason:
-        Textual reason shown to the user about why this functionality has
-        been deprecated.
-    replacement:
-        Textual suggestion shown to the user about what alternative
-        functionality they can use.
-    gone_in:
-        The version of pip in which this functionality should be removed.
-        Raises errors if pip's current version is greater than or equal to
-        this.
-    issue:
-        Issue number on the tracker that would serve as a useful place for
-        users to find related discussion and provide feedback.
-
-    Always pass replacement, gone_in and issue as keyword arguments for clarity
-    at the call site.
-    """
-
-    # Construct a nice message.
-    #   This is eagerly formatted as we want it to get logged as if someone
-    #   typed this entire message out.
-    sentences = [
-        (reason, DEPRECATION_MSG_PREFIX + "{}"),
-        (gone_in, "pip {} will remove support for this functionality."),
-        (replacement, "A possible replacement is {}."),
-        (issue, (
-            "You can find discussion regarding this at "
-            "https://github.com/pypa/pip/issues/{}."
-        )),
-    ]
-    message = " ".join(
-        template.format(val) for val, template in sentences if val is not None
-    )
-
-    # Raise as an error if it has to be removed.
-    if gone_in is not None and parse(current_version) >= parse(gone_in):
-        raise PipDeprecationWarning(message)
-
-    warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
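A usage sketch of deprecated() from the file above; the import path is the module being deleted here (not a public pip API), and the reason/replacement/issue values are purely illustrative:

    from pip._internal.utils.deprecation import deprecated, install_warning_logger

    install_warning_logger()   # route PipDeprecationWarning through the pip logger

    # Logs "DEPRECATION: <reason> ..." while gone_in is None or still in the future;
    # once pip's own version reaches gone_in, the same call raises instead.
    deprecated(
        reason="Support for the legacy example format is deprecated.",
        replacement="the new example format",
        gone_in=None,     # never hard-fail in this sketch
        issue=1234,       # hypothetical tracker issue number
    )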
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/encoding.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/encoding.py
deleted file mode 100644
index ab4d4b9..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/encoding.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-
-import codecs
-import locale
-import re
-import sys
-
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import List, Tuple, Text
-
-BOMS = [
-    (codecs.BOM_UTF8, 'utf-8'),
-    (codecs.BOM_UTF16, 'utf-16'),
-    (codecs.BOM_UTF16_BE, 'utf-16-be'),
-    (codecs.BOM_UTF16_LE, 'utf-16-le'),
-    (codecs.BOM_UTF32, 'utf-32'),
-    (codecs.BOM_UTF32_BE, 'utf-32-be'),
-    (codecs.BOM_UTF32_LE, 'utf-32-le'),
-]  # type: List[Tuple[bytes, Text]]
-
-ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)')
-
-
-def auto_decode(data):
-    # type: (bytes) -> Text
-    """Check a bytes string for a BOM to correctly detect the encoding
-
-    Fallback to locale.getpreferredencoding(False) like open() on Python3"""
-    for bom, encoding in BOMS:
-        if data.startswith(bom):
-            return data[len(bom):].decode(encoding)
-    # Let's check the first two lines as in PEP 263
-    for line in data.split(b'\n')[:2]:
-        if line[0:1] == b'#' and ENCODING_RE.search(line):
-            encoding = ENCODING_RE.search(line).groups()[0].decode('ascii')
-            return data.decode(encoding)
-    return data.decode(
-        locale.getpreferredencoding(False) or sys.getdefaultencoding(),
-    )
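Two quick checks illustrating auto_decode() above, first the BOM path and then the PEP 263 comment path (importing the module shown in this diff; not a public pip API):

    import codecs
    from pip._internal.utils.encoding import auto_decode

    # A UTF-8 BOM is stripped and the remainder decoded as UTF-8.
    assert auto_decode(codecs.BOM_UTF8 + b"x = 1\n") == "x = 1\n"

    # Without a BOM, a PEP 263 coding comment in the first two lines wins.
    data = b"# -*- coding: latin-1 -*-\nname = '\xe9'\n"
    assert auto_decode(data) == "# -*- coding: latin-1 -*-\nname = '\u00e9'\n"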
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/filesystem.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/filesystem.py
deleted file mode 100644
index f4a389c..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/filesystem.py
+++ /dev/null
@@ -1,115 +0,0 @@
-import os
-import os.path
-import shutil
-import stat
-from contextlib import contextmanager
-from tempfile import NamedTemporaryFile
-
-# NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is
-#       why we ignore the type on this import.
-from pip._vendor.retrying import retry  # type: ignore
-from pip._vendor.six import PY2
-
-from pip._internal.utils.compat import get_path_uid
-from pip._internal.utils.misc import cast
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import BinaryIO, Iterator
-
-    class NamedTemporaryFileResult(BinaryIO):
-        @property
-        def file(self):
-            # type: () -> BinaryIO
-            pass
-
-
-def check_path_owner(path):
-    # type: (str) -> bool
-    # If we don't have a way to check the effective uid of this process, then
-    # we'll just assume that we own the directory.
-    if not hasattr(os, "geteuid"):
-        return True
-
-    previous = None
-    while path != previous:
-        if os.path.lexists(path):
-            # Check if path is writable by current user.
-            if os.geteuid() == 0:
-                # Special handling for root user in order to handle properly
-                # cases where users use sudo without -H flag.
-                try:
-                    path_uid = get_path_uid(path)
-                except OSError:
-                    return False
-                return path_uid == 0
-            else:
-                return os.access(path, os.W_OK)
-        else:
-            previous, path = path, os.path.dirname(path)
-    return False  # assume we don't own the path
-
-
-def copy2_fixed(src, dest):
-    # type: (str, str) -> None
-    """Wrap shutil.copy2() but map errors copying socket files to
-    SpecialFileError as expected.
-
-    See also https://bugs.python.org/issue37700.
-    """
-    try:
-        shutil.copy2(src, dest)
-    except (OSError, IOError):
-        for f in [src, dest]:
-            try:
-                is_socket_file = is_socket(f)
-            except OSError:
-                # An error has already occurred. Another error here is not
-                # a problem and we can ignore it.
-                pass
-            else:
-                if is_socket_file:
-                    raise shutil.SpecialFileError("`%s` is a socket" % f)
-
-        raise
-
-
-def is_socket(path):
-    # type: (str) -> bool
-    return stat.S_ISSOCK(os.lstat(path).st_mode)
-
-
-@contextmanager
-def adjacent_tmp_file(path):
-    # type: (str) -> Iterator[NamedTemporaryFileResult]
-    """Given a path to a file, open a temp file next to it securely and ensure
-    it is written to disk after the context reaches its end.
-    """
-    with NamedTemporaryFile(
-        delete=False,
-        dir=os.path.dirname(path),
-        prefix=os.path.basename(path),
-        suffix='.tmp',
-    ) as f:
-        result = cast('NamedTemporaryFileResult', f)
-        try:
-            yield result
-        finally:
-            result.file.flush()
-            os.fsync(result.file.fileno())
-
-
-_replace_retry = retry(stop_max_delay=1000, wait_fixed=250)
-
-if PY2:
-    @_replace_retry
-    def replace(src, dest):
-        # type: (str, str) -> None
-        try:
-            os.rename(src, dest)
-        except OSError:
-            os.remove(dest)
-            os.rename(src, dest)
-
-else:
-    replace = _replace_retry(os.replace)
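adjacent_tmp_file() and replace() above combine into a write-then-atomic-swap pattern; a sketch against a throwaway target path (the module path is the one shown in this diff):

    import os
    import tempfile
    from pip._internal.utils.filesystem import adjacent_tmp_file, replace

    target = os.path.join(tempfile.mkdtemp(), "cache.json")   # throwaway destination

    # Write next to the destination, fsync on exit, then swap into place so a
    # reader never sees a partially written file.
    with adjacent_tmp_file(target) as tmp:
        tmp.write(b'{"ok": true}')
        tmp_name = tmp.name
    replace(tmp_name, target)

    with open(target, "rb") as f:
        assert f.read() == b'{"ok": true}'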
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/filetypes.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/filetypes.py
deleted file mode 100644
index daa0ca7..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/filetypes.py
+++ /dev/null
@@ -1,16 +0,0 @@
-"""Filetype information.
-"""
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import Tuple
-
-WHEEL_EXTENSION = '.whl'
-BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')  # type: Tuple[str, ...]
-XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz',
-                 '.tar.lz', '.tar.lzma')  # type: Tuple[str, ...]
-ZIP_EXTENSIONS = ('.zip', WHEEL_EXTENSION)  # type: Tuple[str, ...]
-TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')  # type: Tuple[str, ...]
-ARCHIVE_EXTENSIONS = (
-    ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS
-)
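These extension tuples are typically consulted with str.endswith(), which accepts a tuple of suffixes; a one-line sketch with a made-up filename:

    from pip._internal.utils.filetypes import ARCHIVE_EXTENSIONS

    name = "example-1.0.tar.gz"                       # hypothetical download name
    print(name.lower().endswith(ARCHIVE_EXTENSIONS))  # True: .tar.gz is an archive suffix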
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/glibc.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/glibc.py
deleted file mode 100644
index 544b4c2..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/glibc.py
+++ /dev/null
@@ -1,123 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-
-from __future__ import absolute_import
-
-import os
-import re
-import warnings
-
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import Optional, Tuple
-
-
-def glibc_version_string():
-    # type: () -> Optional[str]
-    "Returns glibc version string, or None if not using glibc."
-    return glibc_version_string_confstr() or glibc_version_string_ctypes()
-
-
-def glibc_version_string_confstr():
-    # type: () -> Optional[str]
-    "Primary implementation of glibc_version_string using os.confstr."
-    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
-    # to be broken or missing. This strategy is used in the standard library
-    # platform module:
-    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
-    try:
-        # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17":
-        _, version = os.confstr("CS_GNU_LIBC_VERSION").split()
-    except (AttributeError, OSError, ValueError):
-        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
-        return None
-    return version
-
-
-def glibc_version_string_ctypes():
-    # type: () -> Optional[str]
-    "Fallback implementation of glibc_version_string using ctypes."
-
-    try:
-        import ctypes
-    except ImportError:
-        return None
-
-    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
-    # manpage says, "If filename is NULL, then the returned handle is for the
-    # main program". This way we can let the linker do the work to figure out
-    # which libc our process is actually using.
-    process_namespace = ctypes.CDLL(None)
-    try:
-        gnu_get_libc_version = process_namespace.gnu_get_libc_version
-    except AttributeError:
-        # Symbol doesn't exist -> therefore, we are not linked to
-        # glibc.
-        return None
-
-    # Call gnu_get_libc_version, which returns a string like "2.5"
-    gnu_get_libc_version.restype = ctypes.c_char_p
-    version_str = gnu_get_libc_version()
-    # py2 / py3 compatibility:
-    if not isinstance(version_str, str):
-        version_str = version_str.decode("ascii")
-
-    return version_str
-
-
-# Separated out from have_compatible_glibc for easier unit testing
-def check_glibc_version(version_str, required_major, minimum_minor):
-    # type: (str, int, int) -> bool
-    # Parse string and check against requested version.
-    #
-    # We use a regexp instead of str.split because we want to discard any
-    # random junk that might come after the minor version -- this might happen
-    # in patched/forked versions of glibc (e.g. Linaro's version of glibc
-    # uses version strings like "2.20-2014.11"). See gh-3588.
-    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
-    if not m:
-        warnings.warn("Expected glibc version with 2 components major.minor,"
-                      " got: %s" % version_str, RuntimeWarning)
-        return False
-    return (int(m.group("major")) == required_major and
-            int(m.group("minor")) >= minimum_minor)
-
-
-def have_compatible_glibc(required_major, minimum_minor):
-    # type: (int, int) -> bool
-    version_str = glibc_version_string()
-    if version_str is None:
-        return False
-    return check_glibc_version(version_str, required_major, minimum_minor)
-
-
-# platform.libc_ver regularly returns completely nonsensical glibc
-# versions. E.g. on my computer, platform says:
-#
-#   ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
-#   ('glibc', '2.7')
-#   ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
-#   ('glibc', '2.9')
-#
-# But the truth is:
-#
-#   ~$ ldd --version
-#   ldd (Debian GLIBC 2.22-11) 2.22
-#
-# This is unfortunate, because it means that the linehaul data on libc
-# versions that was generated by pip 8.1.2 and earlier is useless and
-# misleading. Solution: instead of using platform, use our code that actually
-# works.
-def libc_ver():
-    # type: () -> Tuple[str, str]
-    """Try to determine the glibc version
-
-    Returns a tuple of strings (lib, version) which default to empty strings
-    in case the lookup fails.
-    """
-    glibc_version = glibc_version_string()
-    if glibc_version is None:
-        return ("", "")
-    else:
-        return ("glibc", glibc_version)
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/hashes.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/hashes.py
deleted file mode 100644
index a0d87a4..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/hashes.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import hashlib
-
-from pip._vendor.six import iteritems, iterkeys, itervalues
-
-from pip._internal.exceptions import (
-    HashMismatch,
-    HashMissing,
-    InstallationError,
-)
-from pip._internal.utils.misc import read_chunks
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import (
-        Dict, List, BinaryIO, NoReturn, Iterator
-    )
-    from pip._vendor.six import PY3
-    if PY3:
-        from hashlib import _Hash
-    else:
-        from hashlib import _hash as _Hash
-
-
-# The recommended hash algo of the moment. Change this whenever the state of
-# the art changes; it won't hurt backward compatibility.
-FAVORITE_HASH = 'sha256'
-
-
-# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
-# Currently, those are the ones at least as collision-resistant as sha256.
-STRONG_HASHES = ['sha256', 'sha384', 'sha512']
-
-
-class Hashes(object):
-    """A wrapper that builds multiple hashes at once and checks them against
-    known-good values
-
-    """
-    def __init__(self, hashes=None):
-        # type: (Dict[str, List[str]]) -> None
-        """
-        :param hashes: A dict of algorithm names pointing to lists of allowed
-            hex digests
-        """
-        self._allowed = {} if hashes is None else hashes
-
-    @property
-    def digest_count(self):
-        # type: () -> int
-        return sum(len(digests) for digests in self._allowed.values())
-
-    def is_hash_allowed(
-        self,
-        hash_name,   # type: str
-        hex_digest,  # type: str
-    ):
-        """Return whether the given hex digest is allowed."""
-        return hex_digest in self._allowed.get(hash_name, [])
-
-    def check_against_chunks(self, chunks):
-        # type: (Iterator[bytes]) -> None
-        """Check good hashes against ones built from iterable of chunks of
-        data.
-
-        Raise HashMismatch if none match.
-
-        """
-        gots = {}
-        for hash_name in iterkeys(self._allowed):
-            try:
-                gots[hash_name] = hashlib.new(hash_name)
-            except (ValueError, TypeError):
-                raise InstallationError('Unknown hash name: %s' % hash_name)
-
-        for chunk in chunks:
-            for hash in itervalues(gots):
-                hash.update(chunk)
-
-        for hash_name, got in iteritems(gots):
-            if got.hexdigest() in self._allowed[hash_name]:
-                return
-        self._raise(gots)
-
-    def _raise(self, gots):
-        # type: (Dict[str, _Hash]) -> NoReturn
-        raise HashMismatch(self._allowed, gots)
-
-    def check_against_file(self, file):
-        # type: (BinaryIO) -> None
-        """Check good hashes against a file-like object
-
-        Raise HashMismatch if none match.
-
-        """
-        return self.check_against_chunks(read_chunks(file))
-
-    def check_against_path(self, path):
-        # type: (str) -> None
-        with open(path, 'rb') as file:
-            return self.check_against_file(file)
-
-    def __nonzero__(self):
-        # type: () -> bool
-        """Return whether I know any known-good hashes."""
-        return bool(self._allowed)
-
-    def __bool__(self):
-        # type: () -> bool
-        return self.__nonzero__()
-
-
-class MissingHashes(Hashes):
-    """A workalike for Hashes used when we're missing a hash for a requirement
-
-    It computes the actual hash of the requirement and raises a HashMissing
-    exception showing it to the user.
-
-    """
-    def __init__(self):
-        # type: () -> None
-        """Don't offer the ``hashes`` kwarg."""
-        # Pass our favorite hash in to generate a "gotten hash". With the
-        # empty list, it will never match, so an error will always raise.
-        super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []})
-
-    def _raise(self, gots):
-        # type: (Dict[str, _Hash]) -> NoReturn
-        raise HashMissing(gots[FAVORITE_HASH].hexdigest())
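A usage sketch of the Hashes wrapper above; the demo file is created on the spot, and both import paths appear in this diff:

    import hashlib
    from pip._internal.exceptions import HashMismatch
    from pip._internal.utils.hashes import Hashes

    path = "example.bin"                 # throwaway file for the demo
    with open(path, "wb") as f:
        f.write(b"payload")

    good = hashlib.sha256(b"payload").hexdigest()
    Hashes({"sha256": [good]}).check_against_path(path)    # silent when a digest matches

    try:
        Hashes({"sha256": ["0" * 64]}).check_against_path(path)
    except HashMismatch:
        print("no allowed digest matched")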
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/inject_securetransport.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/inject_securetransport.py
deleted file mode 100644
index 5b93b1d..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/inject_securetransport.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""A helper module that injects SecureTransport, on import.
-
-The import should be done as early as possible, to ensure all requests and
-sessions (or whatever) are created after injecting SecureTransport.
-
-Note that we only do the injection on macOS, when the linked OpenSSL is too
-old to handle TLSv1.2.
-"""
-
-import sys
-
-
-def inject_securetransport():
-    # type: () -> None
-    # Only relevant on macOS
-    if sys.platform != "darwin":
-        return
-
-    try:
-        import ssl
-    except ImportError:
-        return
-
-    # Checks for OpenSSL 1.0.1
-    if ssl.OPENSSL_VERSION_NUMBER >= 0x1000100f:
-        return
-
-    try:
-        from pip._vendor.urllib3.contrib import securetransport
-    except (ImportError, OSError):
-        return
-
-    securetransport.inject_into_urllib3()
-
-
-inject_securetransport()
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/logging.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/logging.py
deleted file mode 100644
index 7767111..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/logging.py
+++ /dev/null
@@ -1,398 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import contextlib
-import errno
-import logging
-import logging.handlers
-import os
-import sys
-from logging import Filter, getLogger
-
-from pip._vendor.six import PY2
-
-from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX
-from pip._internal.utils.misc import ensure_dir
-
-try:
-    import threading
-except ImportError:
-    import dummy_threading as threading  # type: ignore
-
-
-try:
-    # Use "import as" and set colorama in the else clause to avoid mypy
-    # errors and get the following correct revealed type for colorama:
-    # `Union[_importlib_modulespec.ModuleType, None]`
-    # Otherwise, we get an error like the following in the except block:
-    #  > Incompatible types in assignment (expression has type "None",
-    #   variable has type Module)
-    # TODO: eliminate the need to use "import as" once mypy addresses some
-    #  of its issues with conditional imports. Here is an umbrella issue:
-    #  https://github.com/python/mypy/issues/1297
-    from pip._vendor import colorama as _colorama
-# Lots of different errors can come from this, including SystemError and
-# ImportError.
-except Exception:
-    colorama = None
-else:
-    # Import Fore explicitly rather than accessing below as colorama.Fore
-    # to avoid the following error running mypy:
-    # > Module has no attribute "Fore"
-    # TODO: eliminate the need to import Fore once mypy addresses some of its
-    #  issues with conditional imports. This particular case could be an
-    #  instance of the following issue (but also see the umbrella issue above):
-    #  https://github.com/python/mypy/issues/3500
-    from pip._vendor.colorama import Fore
-
-    colorama = _colorama
-
-
-_log_state = threading.local()
-_log_state.indentation = 0
-subprocess_logger = getLogger('pip.subprocessor')
-
-
-class BrokenStdoutLoggingError(Exception):
-    """
-    Raised if BrokenPipeError occurs for the stdout stream while logging.
-    """
-    pass
-
-
-# BrokenPipeError does not exist in Python 2 and, in addition, manifests
-# differently in Windows and non-Windows.
-if WINDOWS:
-    # In Windows, a broken pipe can show up as EINVAL rather than EPIPE:
-    # https://bugs.python.org/issue19612
-    # https://bugs.python.org/issue30418
-    if PY2:
-        def _is_broken_pipe_error(exc_class, exc):
-            """See the docstring for non-Windows Python 3 below."""
-            return (exc_class is IOError and
-                    exc.errno in (errno.EINVAL, errno.EPIPE))
-    else:
-        # In Windows, a broken pipe IOError became OSError in Python 3.
-        def _is_broken_pipe_error(exc_class, exc):
-            """See the docstring for non-Windows Python 3 below."""
-            return ((exc_class is BrokenPipeError) or  # noqa: F821
-                    (exc_class is OSError and
-                     exc.errno in (errno.EINVAL, errno.EPIPE)))
-elif PY2:
-    def _is_broken_pipe_error(exc_class, exc):
-        """See the docstring for non-Windows Python 3 below."""
-        return (exc_class is IOError and exc.errno == errno.EPIPE)
-else:
-    # Then we are in the non-Windows Python 3 case.
-    def _is_broken_pipe_error(exc_class, exc):
-        """
-        Return whether an exception is a broken pipe error.
-
-        Args:
-          exc_class: an exception class.
-          exc: an exception instance.
-        """
-        return (exc_class is BrokenPipeError)  # noqa: F821
-
-
-@contextlib.contextmanager
-def indent_log(num=2):
-    """
-    A context manager which will cause the log output to be indented for any
-    log messages emitted inside it.
-    """
-    _log_state.indentation += num
-    try:
-        yield
-    finally:
-        _log_state.indentation -= num
-
-
-def get_indentation():
-    return getattr(_log_state, 'indentation', 0)
-
-
-class IndentingFormatter(logging.Formatter):
-
-    def __init__(self, *args, **kwargs):
-        """
-        A logging.Formatter that obeys the indent_log() context manager.
-
-        :param add_timestamp: A bool indicating output lines should be prefixed
-            with their record's timestamp.
-        """
-        self.add_timestamp = kwargs.pop("add_timestamp", False)
-        super(IndentingFormatter, self).__init__(*args, **kwargs)
-
-    def get_message_start(self, formatted, levelno):
-        """
-        Return the start of the formatted log message (not counting the
-        prefix to add to each line).
-        """
-        if levelno < logging.WARNING:
-            return ''
-        if formatted.startswith(DEPRECATION_MSG_PREFIX):
-            # Then the message already has a prefix.  We don't want it to
-            # look like "WARNING: DEPRECATION: ...."
-            return ''
-        if levelno < logging.ERROR:
-            return 'WARNING: '
-
-        return 'ERROR: '
-
-    def format(self, record):
-        """
-        Calls the standard formatter, but will indent all of the log message
-        lines by our current indentation level.
-        """
-        formatted = super(IndentingFormatter, self).format(record)
-        message_start = self.get_message_start(formatted, record.levelno)
-        formatted = message_start + formatted
-
-        prefix = ''
-        if self.add_timestamp:
-            # TODO: Use Formatter.default_time_format after dropping PY2.
-            t = self.formatTime(record, "%Y-%m-%dT%H:%M:%S")
-            prefix = '%s,%03d ' % (t, record.msecs)
-        prefix += " " * get_indentation()
-        formatted = "".join([
-            prefix + line
-            for line in formatted.splitlines(True)
-        ])
-        return formatted
-
-
-def _color_wrap(*colors):
-    def wrapped(inp):
-        return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
-    return wrapped
-
-
-class ColorizedStreamHandler(logging.StreamHandler):
-
-    # Don't build up a list of colors if we don't have colorama
-    if colorama:
-        COLORS = [
-            # This needs to be in order from highest logging level to lowest.
-            (logging.ERROR, _color_wrap(Fore.RED)),
-            (logging.WARNING, _color_wrap(Fore.YELLOW)),
-        ]
-    else:
-        COLORS = []
-
-    def __init__(self, stream=None, no_color=None):
-        logging.StreamHandler.__init__(self, stream)
-        self._no_color = no_color
-
-        if WINDOWS and colorama:
-            self.stream = colorama.AnsiToWin32(self.stream)
-
-    def _using_stdout(self):
-        """
-        Return whether the handler is using sys.stdout.
-        """
-        if WINDOWS and colorama:
-            # Then self.stream is an AnsiToWin32 object.
-            return self.stream.wrapped is sys.stdout
-
-        return self.stream is sys.stdout
-
-    def should_color(self):
-        # Don't colorize things if we do not have colorama or if told not to
-        if not colorama or self._no_color:
-            return False
-
-        real_stream = (
-            self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
-            else self.stream.wrapped
-        )
-
-        # If the stream is a tty we should color it
-        if hasattr(real_stream, "isatty") and real_stream.isatty():
-            return True
-
-        # If we have an ANSI term we should color it
-        if os.environ.get("TERM") == "ANSI":
-            return True
-
-        # If anything else we should not color it
-        return False
-
-    def format(self, record):
-        msg = logging.StreamHandler.format(self, record)
-
-        if self.should_color():
-            for level, color in self.COLORS:
-                if record.levelno >= level:
-                    msg = color(msg)
-                    break
-
-        return msg
-
-    # The logging module says handleError() can be customized.
-    def handleError(self, record):
-        exc_class, exc = sys.exc_info()[:2]
-        # If a broken pipe occurred while calling write() or flush() on the
-        # stdout stream in logging's Handler.emit(), then raise our special
-        # exception so we can handle it in main() instead of logging the
-        # broken pipe error and continuing.
-        if (exc_class and self._using_stdout() and
-                _is_broken_pipe_error(exc_class, exc)):
-            raise BrokenStdoutLoggingError()
-
-        return super(ColorizedStreamHandler, self).handleError(record)
-
-
-class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
-
-    def _open(self):
-        ensure_dir(os.path.dirname(self.baseFilename))
-        return logging.handlers.RotatingFileHandler._open(self)
-
-
-class MaxLevelFilter(Filter):
-
-    def __init__(self, level):
-        self.level = level
-
-    def filter(self, record):
-        return record.levelno < self.level
-
-
-class ExcludeLoggerFilter(Filter):
-
-    """
-    A logging Filter that excludes records from a logger (or its children).
-    """
-
-    def filter(self, record):
-        # The base Filter class allows only records from a logger (or its
-        # children).
-        return not super(ExcludeLoggerFilter, self).filter(record)
-
-
-def setup_logging(verbosity, no_color, user_log_file):
-    """Configures and sets up all of the logging
-
-    Returns the requested logging level, as its integer value.
-    """
-
-    # Determine the level to be logging at.
-    if verbosity >= 1:
-        level = "DEBUG"
-    elif verbosity == -1:
-        level = "WARNING"
-    elif verbosity == -2:
-        level = "ERROR"
-    elif verbosity <= -3:
-        level = "CRITICAL"
-    else:
-        level = "INFO"
-
-    level_number = getattr(logging, level)
-
-    # The "root" logger should match the "console" level *unless* we also need
-    # to log to a user log file.
-    include_user_log = user_log_file is not None
-    if include_user_log:
-        additional_log_file = user_log_file
-        root_level = "DEBUG"
-    else:
-        additional_log_file = "/dev/null"
-        root_level = level
-
-    # Disable any logging besides WARNING unless we have DEBUG level logging
-    # enabled for vendored libraries.
-    vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"
-
-    # Shorthands for clarity
-    log_streams = {
-        "stdout": "ext://sys.stdout",
-        "stderr": "ext://sys.stderr",
-    }
-    handler_classes = {
-        "stream": "pip._internal.utils.logging.ColorizedStreamHandler",
-        "file": "pip._internal.utils.logging.BetterRotatingFileHandler",
-    }
-    handlers = ["console", "console_errors", "console_subprocess"] + (
-        ["user_log"] if include_user_log else []
-    )
-
-    logging.config.dictConfig({
-        "version": 1,
-        "disable_existing_loggers": False,
-        "filters": {
-            "exclude_warnings": {
-                "()": "pip._internal.utils.logging.MaxLevelFilter",
-                "level": logging.WARNING,
-            },
-            "restrict_to_subprocess": {
-                "()": "logging.Filter",
-                "name": subprocess_logger.name,
-            },
-            "exclude_subprocess": {
-                "()": "pip._internal.utils.logging.ExcludeLoggerFilter",
-                "name": subprocess_logger.name,
-            },
-        },
-        "formatters": {
-            "indent": {
-                "()": IndentingFormatter,
-                "format": "%(message)s",
-            },
-            "indent_with_timestamp": {
-                "()": IndentingFormatter,
-                "format": "%(message)s",
-                "add_timestamp": True,
-            },
-        },
-        "handlers": {
-            "console": {
-                "level": level,
-                "class": handler_classes["stream"],
-                "no_color": no_color,
-                "stream": log_streams["stdout"],
-                "filters": ["exclude_subprocess", "exclude_warnings"],
-                "formatter": "indent",
-            },
-            "console_errors": {
-                "level": "WARNING",
-                "class": handler_classes["stream"],
-                "no_color": no_color,
-                "stream": log_streams["stderr"],
-                "filters": ["exclude_subprocess"],
-                "formatter": "indent",
-            },
-            # A handler responsible for logging to the console messages
-            # from the "subprocessor" logger.
-            "console_subprocess": {
-                "level": level,
-                "class": handler_classes["stream"],
-                "no_color": no_color,
-                "stream": log_streams["stderr"],
-                "filters": ["restrict_to_subprocess"],
-                "formatter": "indent",
-            },
-            "user_log": {
-                "level": "DEBUG",
-                "class": handler_classes["file"],
-                "filename": additional_log_file,
-                "delay": True,
-                "formatter": "indent_with_timestamp",
-            },
-        },
-        "root": {
-            "level": root_level,
-            "handlers": handlers,
-        },
-        "loggers": {
-            "pip._vendor": {
-                "level": vendored_log_level
-            }
-        },
-    })
-
-    return level_number
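indent_log() above pairs with IndentingFormatter: every line emitted inside the context manager is prefixed with extra spaces. A hand-wired sketch (logger name and messages are arbitrary; assumes the module shown here is importable):

    import logging
    from pip._internal.utils.logging import IndentingFormatter, indent_log

    handler = logging.StreamHandler()
    handler.setFormatter(IndentingFormatter("%(message)s"))
    log = logging.getLogger("demo")
    log.addHandler(handler)
    log.setLevel(logging.INFO)

    log.info("Collecting example-package")
    with indent_log():
        log.info("Downloading example-package-1.0.tar.gz")   # rendered with a two-space indent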
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/marker_files.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/marker_files.py
deleted file mode 100644
index 734cba4..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/marker_files.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-import os.path
-
-DELETE_MARKER_MESSAGE = '''\
-This file is placed here by pip to indicate the source was put
-here by pip.
-
-Once this package is successfully installed this source code will be
-deleted (unless you remove this file).
-'''
-PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
-
-
-def has_delete_marker_file(directory):
-    return os.path.exists(os.path.join(directory, PIP_DELETE_MARKER_FILENAME))
-
-
-def write_delete_marker_file(directory):
-    # type: (str) -> None
-    """
-    Write the pip delete marker file into this directory.
-    """
-    filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
-    with open(filepath, 'w') as marker_fp:
-        marker_fp.write(DELETE_MARKER_MESSAGE)
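And a short sketch of the marker-file helpers above, against a temporary directory standing in for a pip build dir:

    import tempfile
    from pip._internal.utils.marker_files import (
        has_delete_marker_file,
        write_delete_marker_file,
    )

    build_dir = tempfile.mkdtemp()        # stand-in for a pip-managed build directory
    write_delete_marker_file(build_dir)
    assert has_delete_marker_file(build_dir)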
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/misc.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/misc.py
deleted file mode 100644
index b848263..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/misc.py
+++ /dev/null
@@ -1,870 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import contextlib
-import errno
-import getpass
-import io
-import logging
-import os
-import posixpath
-import shutil
-import stat
-import sys
-from collections import deque
-
-from pip._vendor import pkg_resources
-# NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is
-#       why we ignore the type on this import.
-from pip._vendor.retrying import retry  # type: ignore
-from pip._vendor.six import PY2, text_type
-from pip._vendor.six.moves import input
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote
-
-from pip import __version__
-from pip._internal.exceptions import CommandError
-from pip._internal.locations import (
-    get_major_minor_version,
-    site_packages,
-    user_site,
-)
-from pip._internal.utils.compat import (
-    WINDOWS,
-    expanduser,
-    stdlib_pkgs,
-    str_to_display,
-)
-from pip._internal.utils.marker_files import write_delete_marker_file
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.virtualenv import (
-    running_under_virtualenv,
-    virtualenv_no_global,
-)
-
-if PY2:
-    from io import BytesIO as StringIO
-else:
-    from io import StringIO
-
-if MYPY_CHECK_RUNNING:
-    from typing import (
-        Any, AnyStr, Container, Iterable, List, Optional, Text,
-        Tuple, Union, cast,
-    )
-    from pip._vendor.pkg_resources import Distribution
-
-    VersionInfo = Tuple[int, int, int]
-else:
-    # typing's cast() is needed at runtime, but we don't want to import typing.
-    # Thus, we use a dummy no-op version, which we tell mypy to ignore.
-    def cast(type_, value):  # type: ignore
-        return value
-
-
-__all__ = ['rmtree', 'display_path', 'backup_dir',
-           'ask', 'splitext',
-           'format_size', 'is_installable_dir',
-           'normalize_path',
-           'renames', 'get_prog',
-           'captured_stdout', 'ensure_dir',
-           'get_installed_version', 'remove_auth_from_url']
-
-
-logger = logging.getLogger(__name__)
-
-
-def get_pip_version():
-    # type: () -> str
-    pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..")
-    pip_pkg_dir = os.path.abspath(pip_pkg_dir)
-
-    return (
-        'pip {} from {} (python {})'.format(
-            __version__, pip_pkg_dir, get_major_minor_version(),
-        )
-    )
-
-
-def normalize_version_info(py_version_info):
-    # type: (Tuple[int, ...]) -> Tuple[int, int, int]
-    """
-    Convert a tuple of ints representing a Python version to one of length
-    three.
-
-    :param py_version_info: a tuple of ints representing a Python version,
-        or None to specify no version. The tuple can have any length.
-
-    :return: a tuple of length three if `py_version_info` is non-None.
-        Otherwise, return `py_version_info` unchanged (i.e. None).
-    """
-    if len(py_version_info) < 3:
-        py_version_info += (3 - len(py_version_info)) * (0,)
-    elif len(py_version_info) > 3:
-        py_version_info = py_version_info[:3]
-
-    return cast('VersionInfo', py_version_info)
-
-
-def ensure_dir(path):
-    # type: (AnyStr) -> None
-    """os.path.makedirs without EEXIST."""
-    try:
-        os.makedirs(path)
-    except OSError as e:
-        if e.errno != errno.EEXIST:
-            raise
-
-
-def get_prog():
-    # type: () -> str
-    try:
-        prog = os.path.basename(sys.argv[0])
-        if prog in ('__main__.py', '-c'):
-            return "%s -m pip" % sys.executable
-        else:
-            return prog
-    except (AttributeError, TypeError, IndexError):
-        pass
-    return 'pip'
-
-
-# Retry every half second for up to 3 seconds
-@retry(stop_max_delay=3000, wait_fixed=500)
-def rmtree(dir, ignore_errors=False):
-    # type: (str, bool) -> None
-    shutil.rmtree(dir, ignore_errors=ignore_errors,
-                  onerror=rmtree_errorhandler)
-
-
-def rmtree_errorhandler(func, path, exc_info):
-    """On Windows, the files in .svn are read-only, so when rmtree() tries to
-    remove them, an exception is thrown.  We catch that here, remove the
-    read-only attribute, and hopefully continue without problems."""
-    try:
-        has_attr_readonly = not (os.stat(path).st_mode & stat.S_IWRITE)
-    except (IOError, OSError):
-        # it's equivalent to os.path.exists
-        return
-
-    if has_attr_readonly:
-        # convert to read/write
-        os.chmod(path, stat.S_IWRITE)
-        # use the original function to repeat the operation
-        func(path)
-        return
-    else:
-        raise
-
-
-def path_to_display(path):
-    # type: (Optional[Union[str, Text]]) -> Optional[Text]
-    """
-    Convert a bytes (or text) path to text (unicode in Python 2) for display
-    and logging purposes.
-
-    This function should never error out. Also, this function is mainly needed
-    for Python 2 since in Python 3 str paths are already text.
-    """
-    if path is None:
-        return None
-    if isinstance(path, text_type):
-        return path
-    # Otherwise, path is a bytes object (str in Python 2).
-    try:
-        display_path = path.decode(sys.getfilesystemencoding(), 'strict')
-    except UnicodeDecodeError:
-        # Include the full bytes to make troubleshooting easier, even though
-        # it may not be very human readable.
-        if PY2:
-            # Convert the bytes to a readable str representation using
-            # repr(), and then convert the str to unicode.
-            #   Also, we add the prefix "b" to the repr() return value both
-            # to make the Python 2 output look like the Python 3 output, and
-            # to signal to the user that this is a bytes representation.
-            display_path = str_to_display('b{!r}'.format(path))
-        else:
-            # Silence the "F821 undefined name 'ascii'" flake8 error since
-            # in Python 3 ascii() is a built-in.
-            display_path = ascii(path)  # noqa: F821
-
-    return display_path
-
-
-def display_path(path):
-    # type: (Union[str, Text]) -> str
-    """Gives the display value for a given path, making it relative to cwd
-    if possible."""
-    path = os.path.normcase(os.path.abspath(path))
-    if sys.version_info[0] == 2:
-        path = path.decode(sys.getfilesystemencoding(), 'replace')
-        path = path.encode(sys.getdefaultencoding(), 'replace')
-    if path.startswith(os.getcwd() + os.path.sep):
-        path = '.' + path[len(os.getcwd()):]
-    return path
-
-
-def backup_dir(dir, ext='.bak'):
-    # type: (str, str) -> str
-    """Figure out the name of a directory to back up the given dir to
-    (adding .bak, .bak2, etc)"""
-    n = 1
-    extension = ext
-    while os.path.exists(dir + extension):
-        n += 1
-        extension = ext + str(n)
-    return dir + extension
-
-
-def ask_path_exists(message, options):
-    # type: (str, Iterable[str]) -> str
-    for action in os.environ.get('PIP_EXISTS_ACTION', '').split():
-        if action in options:
-            return action
-    return ask(message, options)
-
-
-def _check_no_input(message):
-    # type: (str) -> None
-    """Raise an error if no input is allowed."""
-    if os.environ.get('PIP_NO_INPUT'):
-        raise Exception(
-            'No input was expected ($PIP_NO_INPUT set); question: %s' %
-            message
-        )
-
-
-def ask(message, options):
-    # type: (str, Iterable[str]) -> str
-    """Ask the message interactively, with the given possible responses"""
-    while 1:
-        _check_no_input(message)
-        response = input(message)
-        response = response.strip().lower()
-        if response not in options:
-            print(
-                'Your response (%r) was not one of the expected responses: '
-                '%s' % (response, ', '.join(options))
-            )
-        else:
-            return response
-
-
-def ask_input(message):
-    # type: (str) -> str
-    """Ask for input interactively."""
-    _check_no_input(message)
-    return input(message)
-
-
-def ask_password(message):
-    # type: (str) -> str
-    """Ask for a password interactively."""
-    _check_no_input(message)
-    return getpass.getpass(message)
-
-
-def format_size(bytes):
-    # type: (float) -> str
-    if bytes > 1000 * 1000:
-        return '%.1fMB' % (bytes / 1000.0 / 1000)
-    elif bytes > 10 * 1000:
-        return '%ikB' % (bytes / 1000)
-    elif bytes > 1000:
-        return '%.1fkB' % (bytes / 1000.0)
-    else:
-        return '%ibytes' % bytes
-
-
-def is_installable_dir(path):
-    # type: (str) -> bool
-    """Is path is a directory containing setup.py or pyproject.toml?
-    """
-    if not os.path.isdir(path):
-        return False
-    setup_py = os.path.join(path, 'setup.py')
-    if os.path.isfile(setup_py):
-        return True
-    pyproject_toml = os.path.join(path, 'pyproject.toml')
-    if os.path.isfile(pyproject_toml):
-        return True
-    return False
-
-
-def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
-    """Yield pieces of data from a file-like object until EOF."""
-    while True:
-        chunk = file.read(size)
-        if not chunk:
-            break
-        yield chunk
-
-
-def normalize_path(path, resolve_symlinks=True):
-    # type: (str, bool) -> str
-    """
-    Convert a path to its canonical, case-normalized, absolute version.
-
-    """
-    path = expanduser(path)
-    if resolve_symlinks:
-        path = os.path.realpath(path)
-    else:
-        path = os.path.abspath(path)
-    return os.path.normcase(path)
-
-
-def splitext(path):
-    # type: (str) -> Tuple[str, str]
-    """Like os.path.splitext, but take off .tar too"""
-    base, ext = posixpath.splitext(path)
-    if base.lower().endswith('.tar'):
-        ext = base[-4:] + ext
-        base = base[:-4]
-    return base, ext
-
-
-def renames(old, new):
-    # type: (str, str) -> None
-    """Like os.renames(), but handles renaming across devices."""
-    # Implementation borrowed from os.renames().
-    head, tail = os.path.split(new)
-    if head and tail and not os.path.exists(head):
-        os.makedirs(head)
-
-    shutil.move(old, new)
-
-    head, tail = os.path.split(old)
-    if head and tail:
-        try:
-            os.removedirs(head)
-        except OSError:
-            pass
-
-
-def is_local(path):
-    # type: (str) -> bool
-    """
-    Return True if path is within sys.prefix, if we're running in a virtualenv.
-
-    If we're not in a virtualenv, all paths are considered "local."
-
-    Caution: this function assumes the head of path has been normalized
-    with normalize_path.
-    """
-    if not running_under_virtualenv():
-        return True
-    return path.startswith(normalize_path(sys.prefix))
-
-
-def dist_is_local(dist):
-    # type: (Distribution) -> bool
-    """
-    Return True if given Distribution object is installed locally
-    (i.e. within current virtualenv).
-
-    Always True if we're not in a virtualenv.
-
-    """
-    return is_local(dist_location(dist))
-
-
-def dist_in_usersite(dist):
-    # type: (Distribution) -> bool
-    """
-    Return True if given Distribution is installed in user site.
-    """
-    return dist_location(dist).startswith(normalize_path(user_site))
-
-
-def dist_in_site_packages(dist):
-    # type: (Distribution) -> bool
-    """
-    Return True if given Distribution is installed in
-    sysconfig.get_python_lib().
-    """
-    return dist_location(dist).startswith(normalize_path(site_packages))
-
-
-def dist_is_editable(dist):
-    # type: (Distribution) -> bool
-    """
-    Return True if given Distribution is an editable install.
-    """
-    for path_item in sys.path:
-        egg_link = os.path.join(path_item, dist.project_name + '.egg-link')
-        if os.path.isfile(egg_link):
-            return True
-    return False
-
-
-def get_installed_distributions(
-        local_only=True,  # type: bool
-        skip=stdlib_pkgs,  # type: Container[str]
-        include_editables=True,  # type: bool
-        editables_only=False,  # type: bool
-        user_only=False,  # type: bool
-        paths=None  # type: Optional[List[str]]
-):
-    # type: (...) -> List[Distribution]
-    """
-    Return a list of installed Distribution objects.
-
-    If ``local_only`` is True (default), only return installations
-    local to the current virtualenv, if in a virtualenv.
-
-    ``skip`` argument is an iterable of lower-case project names to
-    ignore; defaults to stdlib_pkgs
-
-    If ``include_editables`` is False, don't report editables.
-
-    If ``editables_only`` is True , only report editables.
-
-    If ``user_only`` is True , only report installations in the user
-    site directory.
-
-    If ``paths`` is set, only report the distributions present at the
-    specified list of locations.
-    """
-    if paths:
-        working_set = pkg_resources.WorkingSet(paths)
-    else:
-        working_set = pkg_resources.working_set
-
-    if local_only:
-        local_test = dist_is_local
-    else:
-        def local_test(d):
-            return True
-
-    if include_editables:
-        def editable_test(d):
-            return True
-    else:
-        def editable_test(d):
-            return not dist_is_editable(d)
-
-    if editables_only:
-        def editables_only_test(d):
-            return dist_is_editable(d)
-    else:
-        def editables_only_test(d):
-            return True
-
-    if user_only:
-        user_test = dist_in_usersite
-    else:
-        def user_test(d):
-            return True
-
-    # because of pkg_resources vendoring, mypy cannot find stub in typeshed
-    return [d for d in working_set  # type: ignore
-            if local_test(d) and
-            d.key not in skip and
-            editable_test(d) and
-            editables_only_test(d) and
-            user_test(d)
-            ]
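A minimal usage sketch for the helper above; it assumes the internal import path shown in this diff (pip._internal is not a public API), so treat it as illustrative only.

from pip._internal.utils.misc import get_installed_distributions

# Non-editable distributions local to the current (virtual) environment,
# with pip's usual stdlib shims skipped by default.
for dist in get_installed_distributions(local_only=True, include_editables=False):
    print(dist.project_name, dist.version)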
-
-
-def egg_link_path(dist):
-    # type: (Distribution) -> Optional[str]
-    """
-    Return the path for the .egg-link file if it exists, otherwise, None.
-
-    There are 3 scenarios:
-    1) not in a virtualenv
-       try to find in site.USER_SITE, then site_packages
-    2) in a no-global virtualenv
-       try to find in site_packages
-    3) in a yes-global virtualenv
-       try to find in site_packages, then site.USER_SITE
-       (don't look in global location)
-
-    For #1 and #3, there could be odd cases where there's an egg-link in 2
-    locations.
-
-    This method will just return the first one found.
-    """
-    sites = []
-    if running_under_virtualenv():
-        sites.append(site_packages)
-        if not virtualenv_no_global() and user_site:
-            sites.append(user_site)
-    else:
-        if user_site:
-            sites.append(user_site)
-        sites.append(site_packages)
-
-    for site in sites:
-        egglink = os.path.join(site, dist.project_name) + '.egg-link'
-        if os.path.isfile(egglink):
-            return egglink
-    return None
-
-
-def dist_location(dist):
-    # type: (Distribution) -> str
-    """
-    Get the site-packages location of this distribution. Generally
-    this is dist.location, except in the case of develop-installed
-    packages, where dist.location is the source code location, and we
-    want to know where the egg-link file is.
-
-    The returned location is normalized (in particular, with symlinks removed).
-    """
-    egg_link = egg_link_path(dist)
-    if egg_link:
-        return normalize_path(egg_link)
-    return normalize_path(dist.location)
-
-
-def write_output(msg, *args):
-    # type: (str, str) -> None
-    logger.info(msg, *args)
-
-
-def _make_build_dir(build_dir):
-    os.makedirs(build_dir)
-    write_delete_marker_file(build_dir)
-
-
-class FakeFile(object):
-    """Wrap a list of lines in an object with readline() to make
-    ConfigParser happy."""
-    def __init__(self, lines):
-        self._gen = (l for l in lines)
-
-    def readline(self):
-        try:
-            try:
-                return next(self._gen)
-            except NameError:
-                return self._gen.next()
-        except StopIteration:
-            return ''
-
-    def __iter__(self):
-        return self._gen
-
-
-class StreamWrapper(StringIO):
-
-    @classmethod
-    def from_stream(cls, orig_stream):
-        cls.orig_stream = orig_stream
-        return cls()
-
-    # compileall.compile_dir() needs stdout.encoding to print to stdout
-    @property
-    def encoding(self):
-        return self.orig_stream.encoding
-
-
-@contextlib.contextmanager
-def captured_output(stream_name):
-    """Return a context manager used by captured_stdout/stdin/stderr
-    that temporarily replaces the sys stream *stream_name* with a StringIO.
-
-    Taken from Lib/support/__init__.py in the CPython repo.
-    """
-    orig_stdout = getattr(sys, stream_name)
-    setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout))
-    try:
-        yield getattr(sys, stream_name)
-    finally:
-        setattr(sys, stream_name, orig_stdout)
-
-
-def captured_stdout():
-    """Capture the output of sys.stdout:
-
-       with captured_stdout() as stdout:
-           print('hello')
-       self.assertEqual(stdout.getvalue(), 'hello\n')
-
-    Taken from Lib/support/__init__.py in the CPython repo.
-    """
-    return captured_output('stdout')
-
-
-def captured_stderr():
-    """
-    See captured_stdout().
-    """
-    return captured_output('stderr')
-
-
-class cached_property(object):
-    """A property that is only computed once per instance and then replaces
-       itself with an ordinary attribute. Deleting the attribute resets the
-       property.
-
-       Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175
-    """
-
-    def __init__(self, func):
-        self.__doc__ = getattr(func, '__doc__')
-        self.func = func
-
-    def __get__(self, obj, cls):
-        if obj is None:
-            # We're being accessed from the class itself, not from an object
-            return self
-        value = obj.__dict__[self.func.__name__] = self.func(obj)
-        return value
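A short sketch of the descriptor above in use, assuming the cached_property class is in scope; the Dataset class and its fake loading step are hypothetical, chosen only to show the compute-once-then-cache behaviour.

class Dataset(object):
    @cached_property
    def rows(self):
        print('loading...')        # runs only on first access
        return [1, 2, 3]

d = Dataset()
d.rows       # prints 'loading...' and stores the result in d.__dict__
d.rows       # second access hits the plain attribute, no recomputation
del d.rows   # deleting the attribute resets the property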
-
-
-def get_installed_version(dist_name, working_set=None):
-    """Get the installed version of dist_name avoiding pkg_resources cache"""
-    # Create a requirement that we'll look for inside of setuptools.
-    req = pkg_resources.Requirement.parse(dist_name)
-
-    if working_set is None:
-        # We want to avoid having this cached, so we need to construct a new
-        # working set each time.
-        working_set = pkg_resources.WorkingSet()
-
-    # Get the installed distribution from our working set
-    dist = working_set.find(req)
-
-    # Check to see if we got an installed distribution or not; if we did,
-    # we want to return its version.
-    return dist.version if dist else None
-
-
-def consume(iterator):
-    """Consume an iterable at C speed."""
-    deque(iterator, maxlen=0)
-
-
-# Simulates an enum
-def enum(*sequential, **named):
-    enums = dict(zip(sequential, range(len(sequential))), **named)
-    reverse = {value: key for key, value in enums.items()}
-    enums['reverse_mapping'] = reverse
-    return type('Enum', (), enums)
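A sketch of what the enum() factory above produces; the member names are made up for illustration.

Color = enum('RED', 'GREEN', BLUE=10)
Color.RED                  # 0, positional names count up from zero
Color.BLUE                 # 10, keyword names keep the given value
Color.reverse_mapping[0]   # 'RED', maps a value back to its name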
-
-
-def build_netloc(host, port):
-    # type: (str, Optional[int]) -> str
-    """
-    Build a netloc from a host-port pair
-    """
-    if port is None:
-        return host
-    if ':' in host:
-        # Only wrap host with square brackets when it is IPv6
-        host = '[{}]'.format(host)
-    return '{}:{}'.format(host, port)
-
-
-def build_url_from_netloc(netloc, scheme='https'):
-    # type: (str, str) -> str
-    """
-    Build a full URL from a netloc.
-    """
-    if netloc.count(':') >= 2 and '@' not in netloc and '[' not in netloc:
-        # It must be a bare IPv6 address, so wrap it with brackets.
-        netloc = '[{}]'.format(netloc)
-    return '{}://{}'.format(scheme, netloc)
-
-
-def parse_netloc(netloc):
-    # type: (str) -> Tuple[str, Optional[int]]
-    """
-    Return the host-port pair from a netloc.
-    """
-    url = build_url_from_netloc(netloc)
-    parsed = urllib_parse.urlparse(url)
-    return parsed.hostname, parsed.port
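A behavioural sketch of the two netloc helpers above, showing the IPv6 bracketing rule (values are illustrative, and both functions are assumed to be in scope):

build_netloc('::1', 8080)     # '[::1]:8080' (bare IPv6 hosts get bracketed)
parse_netloc('[::1]:8080')    # ('::1', 8080)
parse_netloc('example.com')   # ('example.com', None), no port given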
-
-
-def split_auth_from_netloc(netloc):
-    """
-    Parse out and remove the auth information from a netloc.
-
-    Returns: (netloc, (username, password)).
-    """
-    if '@' not in netloc:
-        return netloc, (None, None)
-
-    # Split from the right because that's how urllib.parse.urlsplit()
-    # behaves if more than one @ is present (which can be checked using
-    # the password attribute of urlsplit()'s return value).
-    auth, netloc = netloc.rsplit('@', 1)
-    if ':' in auth:
-        # Split from the left because that's how urllib.parse.urlsplit()
-        # behaves if more than one : is present (which again can be checked
-        # using the password attribute of the return value)
-        user_pass = auth.split(':', 1)
-    else:
-        user_pass = auth, None
-
-    user_pass = tuple(
-        None if x is None else urllib_unquote(x) for x in user_pass
-    )
-
-    return netloc, user_pass
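A behavioural sketch of split_auth_from_netloc(), assuming it is imported from the internal module shown in this diff; the credentials are made up.

from pip._internal.utils.misc import split_auth_from_netloc

split_auth_from_netloc('user:s3cret@example.com')  # ('example.com', ('user', 's3cret'))
split_auth_from_netloc('token@example.com')        # ('example.com', ('token', None))
split_auth_from_netloc('example.com')              # ('example.com', (None, None))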
-
-
-def redact_netloc(netloc):
-    # type: (str) -> str
-    """
-    Replace the sensitive data in a netloc with "****", if it exists.
-
-    For example:
-        - "user:pass@example.com" returns "user:****@example.com"
-        - "accesstoken@example.com" returns "****@example.com"
-    """
-    netloc, (user, password) = split_auth_from_netloc(netloc)
-    if user is None:
-        return netloc
-    if password is None:
-        user = '****'
-        password = ''
-    else:
-        user = urllib_parse.quote(user)
-        password = ':****'
-    return '{user}{password}@{netloc}'.format(user=user,
-                                              password=password,
-                                              netloc=netloc)
-
-
-def _transform_url(url, transform_netloc):
-    """Transform and replace netloc in a url.
-
-    transform_netloc is a function taking the netloc and returning a
-    tuple. The first element of this tuple is the new netloc. The
-    entire tuple is returned.
-
-    Returns a tuple containing the transformed url as item 0 and the
-    original tuple returned by transform_netloc as item 1.
-    """
-    purl = urllib_parse.urlsplit(url)
-    netloc_tuple = transform_netloc(purl.netloc)
-    # stripped url
-    url_pieces = (
-        purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment
-    )
-    surl = urllib_parse.urlunsplit(url_pieces)
-    return surl, netloc_tuple
-
-
-def _get_netloc(netloc):
-    return split_auth_from_netloc(netloc)
-
-
-def _redact_netloc(netloc):
-    return (redact_netloc(netloc),)
-
-
-def split_auth_netloc_from_url(url):
-    # type: (str) -> Tuple[str, str, Tuple[str, str]]
-    """
-    Parse a url into separate netloc, auth, and url with no auth.
-
-    Returns: (url_without_auth, netloc, (username, password))
-    """
-    url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc)
-    return url_without_auth, netloc, auth
-
-
-def remove_auth_from_url(url):
-    # type: (str) -> str
-    """Return a copy of url with 'username:password@' removed."""
-    # username/pass params are passed to subversion through flags
-    # and are not recognized in the url.
-    return _transform_url(url, _get_netloc)[0]
-
-
-def redact_auth_from_url(url):
-    # type: (str) -> str
-    """Replace the password in a given url with ****."""
-    return _transform_url(url, _redact_netloc)[0]
-
-
-class HiddenText(object):
-    def __init__(
-        self,
-        secret,    # type: str
-        redacted,  # type: str
-    ):
-        # type: (...) -> None
-        self.secret = secret
-        self.redacted = redacted
-
-    def __repr__(self):
-        # type: (...) -> str
-        return '<HiddenText {!r}>'.format(str(self))
-
-    def __str__(self):
-        # type: (...) -> str
-        return self.redacted
-
-    # This is useful for testing.
-    def __eq__(self, other):
-        # type: (Any) -> bool
-        if type(self) != type(other):
-            return False
-
-        # The string being used for redaction doesn't also have to match,
-        # just the raw, original string.
-        return (self.secret == other.secret)
-
-    # We need to provide an explicit __ne__ implementation for Python 2.
-    # TODO: remove this when we drop PY2 support.
-    def __ne__(self, other):
-        # type: (Any) -> bool
-        return not self == other
-
-
-def hide_value(value):
-    # type: (str) -> HiddenText
-    return HiddenText(value, redacted='****')
-
-
-def hide_url(url):
-    # type: (str) -> HiddenText
-    redacted = redact_auth_from_url(url)
-    return HiddenText(url, redacted=redacted)
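A sketch tying HiddenText and the redaction helpers together, assuming hide_url above is in scope; the URL is made up.

url = 'https://user:s3cret@index.example.com/simple'
hidden = hide_url(url)
str(hidden)     # 'https://user:****@index.example.com/simple', safe to log
hidden.secret   # the original URL, for code that needs the real value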
-
-
-def protect_pip_from_modification_on_windows(modifying_pip):
-    # type: (bool) -> None
-    """Protection of pip.exe from modification on Windows
-
-    On Windows, any operation modifying pip should be run as:
-        python -m pip ...
-    """
-    pip_names = set()
-    for ext in ('', '.exe'):
-        pip_names.add('pip{ext}'.format(ext=ext))
-        pip_names.add('pip{}{ext}'.format(sys.version_info[0], ext=ext))
-        pip_names.add('pip{}.{}{ext}'.format(*sys.version_info[:2], ext=ext))
-
-    # See https://github.com/pypa/pip/issues/1299 for more discussion
-    should_show_use_python_msg = (
-        modifying_pip and
-        WINDOWS and
-        os.path.basename(sys.argv[0]) in pip_names
-    )
-
-    if should_show_use_python_msg:
-        new_command = [
-            sys.executable, "-m", "pip"
-        ] + sys.argv[1:]
-        raise CommandError(
-            'To modify pip, please run the following command:\n{}'
-            .format(" ".join(new_command))
-        )
-
-
-def is_console_interactive():
-    # type: () -> bool
-    """Is this console interactive?
-    """
-    return sys.stdin is not None and sys.stdin.isatty()
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/models.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/models.py
deleted file mode 100644
index 29e1441..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/models.py
+++ /dev/null
@@ -1,42 +0,0 @@
-"""Utilities for defining models
-"""
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-import operator
-
-
-class KeyBasedCompareMixin(object):
-    """Provides comparison capabilities that is based on a key
-    """
-
-    def __init__(self, key, defining_class):
-        self._compare_key = key
-        self._defining_class = defining_class
-
-    def __hash__(self):
-        return hash(self._compare_key)
-
-    def __lt__(self, other):
-        return self._compare(other, operator.__lt__)
-
-    def __le__(self, other):
-        return self._compare(other, operator.__le__)
-
-    def __gt__(self, other):
-        return self._compare(other, operator.__gt__)
-
-    def __ge__(self, other):
-        return self._compare(other, operator.__ge__)
-
-    def __eq__(self, other):
-        return self._compare(other, operator.__eq__)
-
-    def __ne__(self, other):
-        return self._compare(other, operator.__ne__)
-
-    def _compare(self, other, method):
-        if not isinstance(other, self._defining_class):
-            return NotImplemented
-
-        return method(self._compare_key, other._compare_key)
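A sketch of how the mixin above is meant to be subclassed; the Candidate class is hypothetical and only shows the pattern of supplying a comparison key plus the defining class.

class Candidate(KeyBasedCompareMixin):
    def __init__(self, name, version):
        self.name = name
        self.version = version
        # Ordering, equality and hashing all go through this tuple.
        super(Candidate, self).__init__(key=(name, version), defining_class=Candidate)

Candidate('pip', (19, 3)) < Candidate('pip', (20, 0))    # True
Candidate('pip', (20, 0)) == Candidate('pip', (20, 0))   # True
Candidate('pip', (20, 0)) == object()                    # False (NotImplemented falls back)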
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/packaging.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/packaging.py
deleted file mode 100644
index 68aa86e..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/packaging.py
+++ /dev/null
@@ -1,94 +0,0 @@
-from __future__ import absolute_import
-
-import logging
-from email.parser import FeedParser
-
-from pip._vendor import pkg_resources
-from pip._vendor.packaging import specifiers, version
-
-from pip._internal.exceptions import NoneMetadataError
-from pip._internal.utils.misc import display_path
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import Optional, Tuple
-    from email.message import Message
-    from pip._vendor.pkg_resources import Distribution
-
-
-logger = logging.getLogger(__name__)
-
-
-def check_requires_python(requires_python, version_info):
-    # type: (Optional[str], Tuple[int, ...]) -> bool
-    """
-    Check if the given Python version matches a "Requires-Python" specifier.
-
-    :param version_info: A 3-tuple of ints representing a Python
-        major-minor-micro version to check (e.g. `sys.version_info[:3]`).
-
-    :return: `True` if the given Python version satisfies the requirement.
-        Otherwise, return `False`.
-
-    :raises InvalidSpecifier: If `requires_python` has an invalid format.
-    """
-    if requires_python is None:
-        # The package provides no information
-        return True
-    requires_python_specifier = specifiers.SpecifierSet(requires_python)
-
-    python_version = version.parse('.'.join(map(str, version_info)))
-    return python_version in requires_python_specifier
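A sketch of check_requires_python() in use; the specifiers are illustrative and the import assumes the internal path shown in this diff.

from pip._internal.utils.packaging import check_requires_python

check_requires_python('>=3.5', version_info=(3, 7, 4))   # True
check_requires_python('>=3.8', version_info=(3, 7, 4))   # False
check_requires_python(None, version_info=(3, 7, 4))      # True, no constraint declared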
-
-
-def get_metadata(dist):
-    # type: (Distribution) -> Message
-    """
-    :raises NoneMetadataError: if the distribution reports `has_metadata()`
-        True but `get_metadata()` returns None.
-    """
-    metadata_name = 'METADATA'
-    if (isinstance(dist, pkg_resources.DistInfoDistribution) and
-            dist.has_metadata(metadata_name)):
-        metadata = dist.get_metadata(metadata_name)
-    elif dist.has_metadata('PKG-INFO'):
-        metadata_name = 'PKG-INFO'
-        metadata = dist.get_metadata(metadata_name)
-    else:
-        logger.warning("No metadata found in %s", display_path(dist.location))
-        metadata = ''
-
-    if metadata is None:
-        raise NoneMetadataError(dist, metadata_name)
-
-    feed_parser = FeedParser()
-    # The following line errors out with a "NoneType" TypeError if
-    # passed metadata=None.
-    feed_parser.feed(metadata)
-    return feed_parser.close()
-
-
-def get_requires_python(dist):
-    # type: (pkg_resources.Distribution) -> Optional[str]
-    """
-    Return the "Requires-Python" metadata for a distribution, or None
-    if not present.
-    """
-    pkg_info_dict = get_metadata(dist)
-    requires_python = pkg_info_dict.get('Requires-Python')
-
-    if requires_python is not None:
-        # Convert to a str to satisfy the type checker, since requires_python
-        # can be a Header object.
-        requires_python = str(requires_python)
-
-    return requires_python
-
-
-def get_installer(dist):
-    # type: (Distribution) -> str
-    if dist.has_metadata('INSTALLER'):
-        for line in dist.get_metadata_lines('INSTALLER'):
-            if line.strip():
-                return line.strip()
-    return ''
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/setuptools_build.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/setuptools_build.py
deleted file mode 100644
index 12d866e..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/setuptools_build.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import sys
-
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import List, Sequence
-
-# Shim to wrap setup.py invocation with setuptools
-#
-# We set sys.argv[0] to the path to the underlying setup.py file so
-# setuptools / distutils don't take the path to the setup.py to be "-c" when
-# invoking via the shim.  This avoids e.g. the following manifest_maker
-# warning: "warning: manifest_maker: standard file '-c' not found".
-_SETUPTOOLS_SHIM = (
-    "import sys, setuptools, tokenize; sys.argv[0] = {0!r}; __file__={0!r};"
-    "f=getattr(tokenize, 'open', open)(__file__);"
-    "code=f.read().replace('\\r\\n', '\\n');"
-    "f.close();"
-    "exec(compile(code, __file__, 'exec'))"
-)
-
-
-def make_setuptools_shim_args(
-        setup_py_path,  # type: str
-        global_options=None,  # type: Sequence[str]
-        no_user_config=False,  # type: bool
-        unbuffered_output=False  # type: bool
-):
-    # type: (...) -> List[str]
-    """
-    Get setuptools command arguments with shim-wrapped setup file invocation.
-
-    :param setup_py_path: The path to setup.py to be wrapped.
-    :param global_options: Additional global options.
-    :param no_user_config: If True, disables personal user configuration.
-    :param unbuffered_output: If True, adds the unbuffered switch to the
-     argument list.
-    """
-    args = [sys.executable]
-    if unbuffered_output:
-        args.append('-u')
-    args.extend(['-c', _SETUPTOOLS_SHIM.format(setup_py_path)])
-    if global_options:
-        args.extend(global_options)
-    if no_user_config:
-        args.append('--no-user-cfg')
-    return args
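A sketch of the argument list the helper above builds, assuming it is in scope; the setup.py path is illustrative.

args = make_setuptools_shim_args(
    '/tmp/pkg/setup.py',
    no_user_config=True,
    unbuffered_output=True,
)
# Roughly: [sys.executable, '-u', '-c', '<_SETUPTOOLS_SHIM with the path baked in>',
#           '--no-user-cfg'], ready to be handed to a subprocess runner.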
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/subprocess.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/subprocess.py
deleted file mode 100644
index 2a0c5d1..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/subprocess.py
+++ /dev/null
@@ -1,278 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-
-from __future__ import absolute_import
-
-import logging
-import os
-import subprocess
-
-from pip._vendor.six.moves import shlex_quote
-
-from pip._internal.exceptions import InstallationError
-from pip._internal.utils.compat import console_to_str, str_to_display
-from pip._internal.utils.logging import subprocess_logger
-from pip._internal.utils.misc import HiddenText, path_to_display
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.ui import open_spinner
-
-if MYPY_CHECK_RUNNING:
-    from typing import (
-        Any, Callable, Iterable, List, Mapping, Optional, Text, Union,
-    )
-    from pip._internal.utils.ui import SpinnerInterface
-
-    CommandArgs = List[Union[str, HiddenText]]
-
-
-LOG_DIVIDER = '----------------------------------------'
-
-
-def make_command(*args):
-    # type: (Union[str, HiddenText, CommandArgs]) -> CommandArgs
-    """
-    Create a CommandArgs object.
-    """
-    command_args = []  # type: CommandArgs
-    for arg in args:
-        # Check for list instead of CommandArgs since CommandArgs is
-        # only known during type-checking.
-        if isinstance(arg, list):
-            command_args.extend(arg)
-        else:
-            # Otherwise, arg is str or HiddenText.
-            command_args.append(arg)
-
-    return command_args
-
-
-def format_command_args(args):
-    # type: (Union[List[str], CommandArgs]) -> str
-    """
-    Format command arguments for display.
-    """
-    # For HiddenText arguments, display the redacted form by calling str().
-    # Also, we don't apply str() to arguments that aren't HiddenText since
-    # this can trigger a UnicodeDecodeError in Python 2 if the argument
-    # has type unicode and includes a non-ascii character.  (The type
-    # checker doesn't ensure the annotations are correct in all cases.)
-    return ' '.join(
-        shlex_quote(str(arg)) if isinstance(arg, HiddenText)
-        else shlex_quote(arg) for arg in args
-    )
-
-
-def reveal_command_args(args):
-    # type: (Union[List[str], CommandArgs]) -> List[str]
-    """
-    Return the arguments in their raw, unredacted form.
-    """
-    return [
-        arg.secret if isinstance(arg, HiddenText) else arg for arg in args
-    ]
-
-
-def make_subprocess_output_error(
-    cmd_args,     # type: Union[List[str], CommandArgs]
-    cwd,          # type: Optional[str]
-    lines,        # type: List[Text]
-    exit_status,  # type: int
-):
-    # type: (...) -> Text
-    """
-    Create and return the error message to use to log a subprocess error
-    with command output.
-
-    :param lines: A list of lines, each ending with a newline.
-    """
-    command = format_command_args(cmd_args)
-    # Convert `command` and `cwd` to text (unicode in Python 2) so we can use
-    # them as arguments in the unicode format string below. This avoids
-    # "UnicodeDecodeError: 'ascii' codec can't decode byte ..." in Python 2
-    # if either contains a non-ascii character.
-    command_display = str_to_display(command, desc='command bytes')
-    cwd_display = path_to_display(cwd)
-
-    # We know the joined output value ends in a newline.
-    output = ''.join(lines)
-    msg = (
-        # Use a unicode string to avoid "UnicodeEncodeError: 'ascii'
-        # codec can't encode character ..." in Python 2 when a format
-        # argument (e.g. `output`) has a non-ascii character.
-        u'Command errored out with exit status {exit_status}:\n'
-        ' command: {command_display}\n'
-        '     cwd: {cwd_display}\n'
-        'Complete output ({line_count} lines):\n{output}{divider}'
-    ).format(
-        exit_status=exit_status,
-        command_display=command_display,
-        cwd_display=cwd_display,
-        line_count=len(lines),
-        output=output,
-        divider=LOG_DIVIDER,
-    )
-    return msg
-
-
-def call_subprocess(
-    cmd,  # type: Union[List[str], CommandArgs]
-    show_stdout=False,  # type: bool
-    cwd=None,  # type: Optional[str]
-    on_returncode='raise',  # type: str
-    extra_ok_returncodes=None,  # type: Optional[Iterable[int]]
-    command_desc=None,  # type: Optional[str]
-    extra_environ=None,  # type: Optional[Mapping[str, Any]]
-    unset_environ=None,  # type: Optional[Iterable[str]]
-    spinner=None,  # type: Optional[SpinnerInterface]
-    log_failed_cmd=True  # type: Optional[bool]
-):
-    # type: (...) -> Text
-    """
-    Args:
-      show_stdout: if true, use INFO to log the subprocess's stderr and
-        stdout streams.  Otherwise, use DEBUG.  Defaults to False.
-      extra_ok_returncodes: an iterable of integer return codes that are
-        acceptable, in addition to 0. Defaults to None, which means [].
-      unset_environ: an iterable of environment variable names to unset
-        prior to calling subprocess.Popen().
-      log_failed_cmd: if false, failed commands are not logged, only raised.
-    """
-    if extra_ok_returncodes is None:
-        extra_ok_returncodes = []
-    if unset_environ is None:
-        unset_environ = []
-    # Most places in pip use show_stdout=False. What this means is--
-    #
-    # - We connect the child's output (combined stderr and stdout) to a
-    #   single pipe, which we read.
-    # - We log this output to stderr at DEBUG level as it is received.
-    # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't
-    #   requested), then we show a spinner so the user can still see the
-    #   subprocess is in progress.
-    # - If the subprocess exits with an error, we log the output to stderr
-    #   at ERROR level if it hasn't already been displayed to the console
-    #   (e.g. if --verbose logging wasn't enabled).  This way we don't log
-    #   the output to the console twice.
-    #
-    # If show_stdout=True, then the above is still done, but with DEBUG
-    # replaced by INFO.
-    if show_stdout:
-        # Then log the subprocess output at INFO level.
-        log_subprocess = subprocess_logger.info
-        used_level = logging.INFO
-    else:
-        # Then log the subprocess output using DEBUG.  This also ensures
-        # it will be logged to the log file (aka user_log), if enabled.
-        log_subprocess = subprocess_logger.debug
-        used_level = logging.DEBUG
-
-    # Whether the subprocess will be visible in the console.
-    showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level
-
-    # Only use the spinner if we're not showing the subprocess output
-    # and we have a spinner.
-    use_spinner = not showing_subprocess and spinner is not None
-
-    if command_desc is None:
-        command_desc = format_command_args(cmd)
-
-    log_subprocess("Running command %s", command_desc)
-    env = os.environ.copy()
-    if extra_environ:
-        env.update(extra_environ)
-    for name in unset_environ:
-        env.pop(name, None)
-    try:
-        proc = subprocess.Popen(
-            # Convert HiddenText objects to the underlying str.
-            reveal_command_args(cmd),
-            stderr=subprocess.STDOUT, stdin=subprocess.PIPE,
-            stdout=subprocess.PIPE, cwd=cwd, env=env,
-        )
-        proc.stdin.close()
-    except Exception as exc:
-        if log_failed_cmd:
-            subprocess_logger.critical(
-                "Error %s while executing command %s", exc, command_desc,
-            )
-        raise
-    all_output = []
-    while True:
-        # The "line" value is a unicode string in Python 2.
-        line = console_to_str(proc.stdout.readline())
-        if not line:
-            break
-        line = line.rstrip()
-        all_output.append(line + '\n')
-
-        # Show the line immediately.
-        log_subprocess(line)
-        # Update the spinner.
-        if use_spinner:
-            spinner.spin()
-    try:
-        proc.wait()
-    finally:
-        if proc.stdout:
-            proc.stdout.close()
-    proc_had_error = (
-        proc.returncode and proc.returncode not in extra_ok_returncodes
-    )
-    if use_spinner:
-        if proc_had_error:
-            spinner.finish("error")
-        else:
-            spinner.finish("done")
-    if proc_had_error:
-        if on_returncode == 'raise':
-            if not showing_subprocess and log_failed_cmd:
-                # Then the subprocess streams haven't been logged to the
-                # console yet.
-                msg = make_subprocess_output_error(
-                    cmd_args=cmd,
-                    cwd=cwd,
-                    lines=all_output,
-                    exit_status=proc.returncode,
-                )
-                subprocess_logger.error(msg)
-            exc_msg = (
-                'Command errored out with exit status {}: {} '
-                'Check the logs for full command output.'
-            ).format(proc.returncode, command_desc)
-            raise InstallationError(exc_msg)
-        elif on_returncode == 'warn':
-            subprocess_logger.warning(
-                'Command "%s" had error code %s in %s',
-                command_desc, proc.returncode, cwd,
-            )
-        elif on_returncode == 'ignore':
-            pass
-        else:
-            raise ValueError('Invalid value: on_returncode=%s' %
-                             repr(on_returncode))
-    return ''.join(all_output)
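A minimal sketch of call_subprocess() combined with the redaction helpers from misc.py earlier in this diff; the repository URL is made up and the import paths are the internal ones shown here, so this is illustrative only.

from pip._internal.utils.misc import hide_url
from pip._internal.utils.subprocess import call_subprocess, make_command

# The token is logged as '****' but the real URL is what actually runs.
cmd = make_command('git', 'clone', hide_url('https://token@example.com/repo.git'))
output = call_subprocess(cmd, show_stdout=False, on_returncode='raise')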
-
-
-def runner_with_spinner_message(message):
-    # type: (str) -> Callable
-    """Provide a subprocess_runner that shows a spinner message.
-
-    Intended for use with pep517's Pep517HookCaller. Thus, the runner has
-    an API that matches what's expected by Pep517HookCaller.subprocess_runner.
-    """
-
-    def runner(
-        cmd,  # type: List[str]
-        cwd=None,  # type: Optional[str]
-        extra_environ=None  # type: Optional[Mapping[str, Any]]
-    ):
-        # type: (...) -> None
-        with open_spinner(message) as spinner:
-            call_subprocess(
-                cmd,
-                cwd=cwd,
-                extra_environ=extra_environ,
-                spinner=spinner,
-            )
-
-    return runner
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/temp_dir.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/temp_dir.py
deleted file mode 100644
index 77d40be..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/temp_dir.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import errno
-import itertools
-import logging
-import os.path
-import tempfile
-
-from pip._internal.utils.misc import rmtree
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import Optional
-
-
-logger = logging.getLogger(__name__)
-
-
-class TempDirectory(object):
-    """Helper class that owns and cleans up a temporary directory.
-
-    This class can be used as a context manager or as an OO representation of a
-    temporary directory.
-
-    Attributes:
-        path
-            Location to the created temporary directory
-        delete
-            Whether the directory should be deleted when exiting
-            (when used as a contextmanager)
-
-    Methods:
-        cleanup()
-            Deletes the temporary directory
-
-    When used as a context manager, if the delete attribute is True, on
-    exiting the context the temporary directory is deleted.
-    """
-
-    def __init__(
-        self,
-        path=None,    # type: Optional[str]
-        delete=None,  # type: Optional[bool]
-        kind="temp"
-    ):
-        super(TempDirectory, self).__init__()
-
-        if path is None and delete is None:
-            # If we were not given an explicit directory, and we were not given
-            # an explicit delete option, then we'll default to deleting.
-            delete = True
-
-        if path is None:
-            path = self._create(kind)
-
-        self._path = path
-        self._deleted = False
-        self.delete = delete
-        self.kind = kind
-
-    @property
-    def path(self):
-        # type: () -> str
-        assert not self._deleted, (
-            "Attempted to access deleted path: {}".format(self._path)
-        )
-        return self._path
-
-    def __repr__(self):
-        return "<{} {!r}>".format(self.__class__.__name__, self.path)
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc, value, tb):
-        if self.delete:
-            self.cleanup()
-
-    def _create(self, kind):
-        """Create a temporary directory and store its path in self.path
-        """
-        # We realpath here because some systems have their default tmpdir
-        # symlinked to another directory.  This tends to confuse build
-        # scripts, so we canonicalize the path by traversing potential
-        # symlinks here.
-        path = os.path.realpath(
-            tempfile.mkdtemp(prefix="pip-{}-".format(kind))
-        )
-        logger.debug("Created temporary directory: {}".format(path))
-        return path
-
-    def cleanup(self):
-        """Remove the temporary directory created and reset state
-        """
-        self._deleted = True
-        if os.path.exists(self._path):
-            rmtree(self._path)
-
-
-class AdjacentTempDirectory(TempDirectory):
-    """Helper class that creates a temporary directory adjacent to a real one.
-
-    Attributes:
-        original
-            The original directory to create a temp directory for.
-        path
-            After calling create() or entering, contains the full
-            path to the temporary directory.
-        delete
-            Whether the directory should be deleted when exiting
-            (when used as a contextmanager)
-
-    """
-    # The characters that may be used to name the temp directory
-    # We always prepend a ~ and then rotate through these until
-    # a usable name is found.
-    # pkg_resources raises a different error for .dist-info folder
-    # with leading '-' and invalid metadata
-    LEADING_CHARS = "-~.=%0123456789"
-
-    def __init__(self, original, delete=None):
-        self.original = original.rstrip('/\\')
-        super(AdjacentTempDirectory, self).__init__(delete=delete)
-
-    @classmethod
-    def _generate_names(cls, name):
-        """Generates a series of temporary names.
-
-        The algorithm replaces the leading characters in the name
-        with ones that are valid filesystem characters, but are not
-        valid package names (for both Python and pip definitions of
-        package).
-        """
-        for i in range(1, len(name)):
-            for candidate in itertools.combinations_with_replacement(
-                    cls.LEADING_CHARS, i - 1):
-                new_name = '~' + ''.join(candidate) + name[i:]
-                if new_name != name:
-                    yield new_name
-
-        # If we make it this far, we will have to make a longer name
-        for i in range(len(cls.LEADING_CHARS)):
-            for candidate in itertools.combinations_with_replacement(
-                    cls.LEADING_CHARS, i):
-                new_name = '~' + ''.join(candidate) + name
-                if new_name != name:
-                    yield new_name
-
-    def _create(self, kind):
-        root, name = os.path.split(self.original)
-        for candidate in self._generate_names(name):
-            path = os.path.join(root, candidate)
-            try:
-                os.mkdir(path)
-            except OSError as ex:
-                # Continue if the name exists already
-                if ex.errno != errno.EEXIST:
-                    raise
-            else:
-                path = os.path.realpath(path)
-                break
-        else:
-            # Final fallback on the default behavior.
-            path = os.path.realpath(
-                tempfile.mkdtemp(prefix="pip-{}-".format(kind))
-            )
-
-        logger.debug("Created temporary directory: {}".format(path))
-        return path
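To make the name-generation algorithm concrete, a short sketch of the first candidates produced for a directory named 'requests', derived directly from _generate_names() above (the class is assumed to be in scope):

import itertools

list(itertools.islice(AdjacentTempDirectory._generate_names('requests'), 4))
# ['~equests', '~-quests', '~~quests', '~.quests']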
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/typing.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/typing.py
deleted file mode 100644
index 10170ce..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/typing.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""For neatly implementing static typing in pip.
-
-`mypy` - the static type analysis tool we use - uses the `typing` module, which
-provides core functionality fundamental to mypy's functioning.
-
-Generally, `typing` would be imported at runtime and used in that fashion -
-it acts as a no-op at runtime and does not have any run-time overhead by
-design.
-
-As it turns out, `typing` is not vendorable - it uses separate sources for
-Python 2/Python 3. Thus, this codebase can not expect it to be present.
-To work around this, mypy allows the typing import to be behind a False-y
-optional to prevent it from running at runtime and type-comments can be used
-to remove the need for the types to be accessible directly during runtime.
-
-This module provides the False-y guard in a nicely named fashion so that a
-curious maintainer can reach here to read this.
-
-In pip, all static-typing related imports should be guarded as follows:
-
-    from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-    if MYPY_CHECK_RUNNING:
-        from typing import ...
-
-Ref: https://github.com/python/mypy/issues/3216
-"""
-
-MYPY_CHECK_RUNNING = False
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/ui.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/ui.py
deleted file mode 100644
index f96ab54..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/ui.py
+++ /dev/null
@@ -1,428 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import, division
-
-import contextlib
-import itertools
-import logging
-import sys
-import time
-from signal import SIGINT, default_int_handler, signal
-
-from pip._vendor import six
-from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR
-from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar
-from pip._vendor.progress.spinner import Spinner
-
-from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.logging import get_indentation
-from pip._internal.utils.misc import format_size
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import Any, Iterator, IO
-
-try:
-    from pip._vendor import colorama
-# Lots of different errors can come from this, including SystemError and
-# ImportError.
-except Exception:
-    colorama = None
-
-logger = logging.getLogger(__name__)
-
-
-def _select_progress_class(preferred, fallback):
-    encoding = getattr(preferred.file, "encoding", None)
-
-    # If we don't know what encoding this file is in, then we'll just assume
-    # that it doesn't support unicode and use the ASCII bar.
-    if not encoding:
-        return fallback
-
-    # Collect all of the possible characters we want to use with the preferred
-    # bar.
-    characters = [
-        getattr(preferred, "empty_fill", six.text_type()),
-        getattr(preferred, "fill", six.text_type()),
-    ]
-    characters += list(getattr(preferred, "phases", []))
-
-    # Try to decode the characters we're using for the bar using the encoding
-    # of the given file, if this works then we'll assume that we can use the
-    # fancier bar and if not we'll fall back to the plaintext bar.
-    try:
-        six.text_type().join(characters).encode(encoding)
-    except UnicodeEncodeError:
-        return fallback
-    else:
-        return preferred
-
-
-_BaseBar = _select_progress_class(IncrementalBar, Bar)  # type: Any
-
-
-class InterruptibleMixin(object):
-    """
-    Helper to ensure that self.finish() gets called on keyboard interrupt.
-
-    This allows downloads to be interrupted without leaving temporary state
-    (like hidden cursors) behind.
-
-    This class is similar to the progress library's existing SigIntMixin
-    helper, but as of version 1.2, that helper has the following problems:
-
-    1. It calls sys.exit().
-    2. It discards the existing SIGINT handler completely.
-    3. It leaves its own handler in place even after an uninterrupted finish,
-       which will have unexpected delayed effects if the user triggers an
-       unrelated keyboard interrupt some time after a progress-displaying
-       download has already completed, for example.
-    """
-
-    def __init__(self, *args, **kwargs):
-        """
-        Save the original SIGINT handler for later.
-        """
-        super(InterruptibleMixin, self).__init__(*args, **kwargs)
-
-        self.original_handler = signal(SIGINT, self.handle_sigint)
-
-        # If signal() returns None, the previous handler was not installed from
-        # Python, and we cannot restore it. This probably should not happen,
-        # but if it does, we must restore something sensible instead, at least.
-        # The least bad option should be Python's default SIGINT handler, which
-        # just raises KeyboardInterrupt.
-        if self.original_handler is None:
-            self.original_handler = default_int_handler
-
-    def finish(self):
-        """
-        Restore the original SIGINT handler after finishing.
-
-        This should happen regardless of whether the progress display finishes
-        normally, or gets interrupted.
-        """
-        super(InterruptibleMixin, self).finish()
-        signal(SIGINT, self.original_handler)
-
-    def handle_sigint(self, signum, frame):
-        """
-        Call self.finish() before delegating to the original SIGINT handler.
-
-        This handler should only be in place while the progress display is
-        active.
-        """
-        self.finish()
-        self.original_handler(signum, frame)
-
-
-class SilentBar(Bar):
-
-    def update(self):
-        pass
-
-
-class BlueEmojiBar(IncrementalBar):
-
-    suffix = "%(percent)d%%"
-    bar_prefix = " "
-    bar_suffix = " "
-    phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535")  # type: Any
-
-
-class DownloadProgressMixin(object):
-
-    def __init__(self, *args, **kwargs):
-        super(DownloadProgressMixin, self).__init__(*args, **kwargs)
-        self.message = (" " * (get_indentation() + 2)) + self.message
-
-    @property
-    def downloaded(self):
-        return format_size(self.index)
-
-    @property
-    def download_speed(self):
-        # Avoid zero division errors...
-        if self.avg == 0.0:
-            return "..."
-        return format_size(1 / self.avg) + "/s"
-
-    @property
-    def pretty_eta(self):
-        if self.eta:
-            return "eta %s" % self.eta_td
-        return ""
-
-    def iter(self, it, n=1):
-        for x in it:
-            yield x
-            self.next(n)
-        self.finish()
-
-
-class WindowsMixin(object):
-
-    def __init__(self, *args, **kwargs):
-        # The Windows terminal does not support the hide/show cursor ANSI codes
-        # even with colorama. So we'll ensure that hide_cursor is False on
-        # Windows.
-        # This call needs to go before the super() call, so that hide_cursor
-        # is set in time. The base progress bar class writes the "hide cursor"
-        # code to the terminal in its init, so if we don't set this soon
-        # enough, we get a "hide" with no corresponding "show"...
-        if WINDOWS and self.hide_cursor:
-            self.hide_cursor = False
-
-        super(WindowsMixin, self).__init__(*args, **kwargs)
-
-        # Check if we are running on Windows and we have the colorama module,
-        # if we do then wrap our file with it.
-        if WINDOWS and colorama:
-            self.file = colorama.AnsiToWin32(self.file)
-            # The progress code expects to be able to call self.file.isatty()
-            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
-            # add it.
-            self.file.isatty = lambda: self.file.wrapped.isatty()
-            # The progress code expects to be able to call self.file.flush()
-            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
-            # add it.
-            self.file.flush = lambda: self.file.wrapped.flush()
-
-
-class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin,
-                              DownloadProgressMixin):
-
-    file = sys.stdout
-    message = "%(percent)d%%"
-    suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
-
-# NOTE: The "type: ignore" comments on the following classes are there to
-#       work around https://github.com/python/typing/issues/241
-
-
-class DefaultDownloadProgressBar(BaseDownloadProgressBar,
-                                 _BaseBar):
-    pass
-
-
-class DownloadSilentBar(BaseDownloadProgressBar, SilentBar):  # type: ignore
-    pass
-
-
-class DownloadBar(BaseDownloadProgressBar,  # type: ignore
-                  Bar):
-    pass
-
-
-class DownloadFillingCirclesBar(BaseDownloadProgressBar,  # type: ignore
-                                FillingCirclesBar):
-    pass
-
-
-class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar,  # type: ignore
-                                   BlueEmojiBar):
-    pass
-
-
-class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
-                              DownloadProgressMixin, Spinner):
-
-    file = sys.stdout
-    suffix = "%(downloaded)s %(download_speed)s"
-
-    def next_phase(self):
-        if not hasattr(self, "_phaser"):
-            self._phaser = itertools.cycle(self.phases)
-        return next(self._phaser)
-
-    def update(self):
-        message = self.message % self
-        phase = self.next_phase()
-        suffix = self.suffix % self
-        line = ''.join([
-            message,
-            " " if message else "",
-            phase,
-            " " if suffix else "",
-            suffix,
-        ])
-
-        self.writeln(line)
-
-
-BAR_TYPES = {
-    "off": (DownloadSilentBar, DownloadSilentBar),
-    "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
-    "ascii": (DownloadBar, DownloadProgressSpinner),
-    "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
-    "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner)
-}
-
-
-def DownloadProgressProvider(progress_bar, max=None):
-    if max is None or max == 0:
-        return BAR_TYPES[progress_bar][1]().iter
-    else:
-        return BAR_TYPES[progress_bar][0](max=max).iter
-
-
-################################################################
-# Generic "something is happening" spinners
-#
-# We don't even try using progress.spinner.Spinner here because it's actually
-# simpler to reimplement from scratch than to coerce their code into doing
-# what we need.
-################################################################
-
-@contextlib.contextmanager
-def hidden_cursor(file):
-    # type: (IO) -> Iterator[None]
-    # The Windows terminal does not support the hide/show cursor ANSI codes,
-    # even via colorama. So don't even try.
-    if WINDOWS:
-        yield
-    # We don't want to clutter the output with control characters if we're
-    # writing to a file, or if the user is running with --quiet.
-    # See https://github.com/pypa/pip/issues/3418
-    elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
-        yield
-    else:
-        file.write(HIDE_CURSOR)
-        try:
-            yield
-        finally:
-            file.write(SHOW_CURSOR)
-
-
-class RateLimiter(object):
-    def __init__(self, min_update_interval_seconds):
-        # type: (float) -> None
-        self._min_update_interval_seconds = min_update_interval_seconds
-        self._last_update = 0  # type: float
-
-    def ready(self):
-        # type: () -> bool
-        now = time.time()
-        delta = now - self._last_update
-        return delta >= self._min_update_interval_seconds
-
-    def reset(self):
-        # type: () -> None
-        self._last_update = time.time()
-
-
-class SpinnerInterface(object):
-    def spin(self):
-        # type: () -> None
-        raise NotImplementedError()
-
-    def finish(self, final_status):
-        # type: (str) -> None
-        raise NotImplementedError()
-
-
-class InteractiveSpinner(SpinnerInterface):
-    def __init__(self, message, file=None, spin_chars="-\\|/",
-                 # Empirically, 8 updates/second looks nice
-                 min_update_interval_seconds=0.125):
-        self._message = message
-        if file is None:
-            file = sys.stdout
-        self._file = file
-        self._rate_limiter = RateLimiter(min_update_interval_seconds)
-        self._finished = False
-
-        self._spin_cycle = itertools.cycle(spin_chars)
-
-        self._file.write(" " * get_indentation() + self._message + " ... ")
-        self._width = 0
-
-    def _write(self, status):
-        assert not self._finished
-        # Erase what we wrote before by backspacing to the beginning, writing
-        # spaces to overwrite the old text, and then backspacing again
-        backup = "\b" * self._width
-        self._file.write(backup + " " * self._width + backup)
-        # Now we have a blank slate to add our status
-        self._file.write(status)
-        self._width = len(status)
-        self._file.flush()
-        self._rate_limiter.reset()
-
-    def spin(self):
-        # type: () -> None
-        if self._finished:
-            return
-        if not self._rate_limiter.ready():
-            return
-        self._write(next(self._spin_cycle))
-
-    def finish(self, final_status):
-        # type: (str) -> None
-        if self._finished:
-            return
-        self._write(final_status)
-        self._file.write("\n")
-        self._file.flush()
-        self._finished = True
-
-
-# Used for dumb terminals, non-interactive installs (no tty), etc.
-# We still print updates occasionally (once every 60 seconds by default) to
-# act as a keep-alive for systems like Travis-CI that take lack-of-output as
-# an indication that a task has frozen.
-class NonInteractiveSpinner(SpinnerInterface):
-    def __init__(self, message, min_update_interval_seconds=60):
-        # type: (str, float) -> None
-        self._message = message
-        self._finished = False
-        self._rate_limiter = RateLimiter(min_update_interval_seconds)
-        self._update("started")
-
-    def _update(self, status):
-        assert not self._finished
-        self._rate_limiter.reset()
-        logger.info("%s: %s", self._message, status)
-
-    def spin(self):
-        # type: () -> None
-        if self._finished:
-            return
-        if not self._rate_limiter.ready():
-            return
-        self._update("still running...")
-
-    def finish(self, final_status):
-        # type: (str) -> None
-        if self._finished:
-            return
-        self._update("finished with status '%s'" % (final_status,))
-        self._finished = True
-
-
-@contextlib.contextmanager
-def open_spinner(message):
-    # type: (str) -> Iterator[SpinnerInterface]
-    # Interactive spinner goes directly to sys.stdout rather than being routed
-    # through the logging system, but it acts like it has level INFO,
-    # i.e. it's only displayed if we're at level INFO or better.
-    # Non-interactive spinner goes through the logging system, so it is always
-    # in sync with logging configuration.
-    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
-        spinner = InteractiveSpinner(message)  # type: SpinnerInterface
-    else:
-        spinner = NonInteractiveSpinner(message)
-    try:
-        with hidden_cursor(sys.stdout):
-            yield spinner
-    except KeyboardInterrupt:
-        spinner.finish("canceled")
-        raise
-    except Exception:
-        spinner.finish("error")
-        raise
-    else:
-        spinner.finish("done")
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/unpacking.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/unpacking.py
deleted file mode 100644
index 7252dc2..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/unpacking.py
+++ /dev/null
@@ -1,272 +0,0 @@
-"""Utilities related archives.
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import logging
-import os
-import shutil
-import stat
-import tarfile
-import zipfile
-
-from pip._internal.exceptions import InstallationError
-from pip._internal.utils.filetypes import (
-    BZ2_EXTENSIONS,
-    TAR_EXTENSIONS,
-    XZ_EXTENSIONS,
-    ZIP_EXTENSIONS,
-)
-from pip._internal.utils.misc import ensure_dir
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import Iterable, List, Optional, Text, Union
-
-
-logger = logging.getLogger(__name__)
-
-
-SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
-
-try:
-    import bz2  # noqa
-    SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
-except ImportError:
-    logger.debug('bz2 module is not available')
-
-try:
-    # Only for Python 3.3+
-    import lzma  # noqa
-    SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
-except ImportError:
-    logger.debug('lzma module is not available')
-
-
-def current_umask():
-    """Get the current umask which involves having to set it temporarily."""
-    mask = os.umask(0)
-    os.umask(mask)
-    return mask
-
-
-def split_leading_dir(path):
-    # type: (Union[str, Text]) -> List[Union[str, Text]]
-    path = path.lstrip('/').lstrip('\\')
-    if (
-        '/' in path and (
-            ('\\' in path and path.find('/') < path.find('\\')) or
-            '\\' not in path
-        )
-    ):
-        return path.split('/', 1)
-    elif '\\' in path:
-        return path.split('\\', 1)
-    else:
-        return [path, '']
-
-
-def has_leading_dir(paths):
-    # type: (Iterable[Union[str, Text]]) -> bool
-    """Returns true if all the paths have the same leading path name
-    (i.e., everything is in one subdirectory in an archive)"""
-    common_prefix = None
-    for path in paths:
-        prefix, rest = split_leading_dir(path)
-        if not prefix:
-            return False
-        elif common_prefix is None:
-            common_prefix = prefix
-        elif prefix != common_prefix:
-            return False
-    return True
-
-
-def is_within_directory(directory, target):
-    # type: ((Union[str, Text]), (Union[str, Text])) -> bool
-    """
-    Return true if the absolute path of target is within the directory
-    """
-    abs_directory = os.path.abspath(directory)
-    abs_target = os.path.abspath(target)
-
-    prefix = os.path.commonprefix([abs_directory, abs_target])
-    return prefix == abs_directory
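A behavioural sketch of the containment check above, assuming it is in scope (illustrative POSIX paths):

is_within_directory('/srv/unpack', '/srv/unpack/pkg/setup.py')   # True
is_within_directory('/srv/unpack', '/srv/unpack/../etc/passwd')  # False, '..' is normalised away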
-
-
-def unzip_file(filename, location, flatten=True):
-    # type: (str, str, bool) -> None
-    """
-    Unzip the file (with path `filename`) to the destination `location`.  All
-    files are written based on system defaults and umask (i.e. permissions are
-    not preserved), except that regular file members with any execute
-    permissions (user, group, or world) have "chmod +x" applied after being
-    written. Note that for windows, any execute changes using os.chmod are
-    no-ops per the python docs.
-    """
-    ensure_dir(location)
-    zipfp = open(filename, 'rb')
-    try:
-        zip = zipfile.ZipFile(zipfp, allowZip64=True)
-        leading = has_leading_dir(zip.namelist()) and flatten
-        for info in zip.infolist():
-            name = info.filename
-            fn = name
-            if leading:
-                fn = split_leading_dir(name)[1]
-            fn = os.path.join(location, fn)
-            dir = os.path.dirname(fn)
-            if not is_within_directory(location, fn):
-                message = (
-                    'The zip file ({}) has a file ({}) trying to install '
-                    'outside target directory ({})'
-                )
-                raise InstallationError(message.format(filename, fn, location))
-            if fn.endswith('/') or fn.endswith('\\'):
-                # A directory
-                ensure_dir(fn)
-            else:
-                ensure_dir(dir)
-                # Don't use read() to avoid allocating an arbitrarily large
-                # chunk of memory for the file's content
-                fp = zip.open(name)
-                try:
-                    with open(fn, 'wb') as destfp:
-                        shutil.copyfileobj(fp, destfp)
-                finally:
-                    fp.close()
-                    mode = info.external_attr >> 16
-                    # if mode and regular file and any execute permissions for
-                    # user/group/world?
-                    if mode and stat.S_ISREG(mode) and mode & 0o111:
-                        # make dest file have execute for user/group/world
-                        # (chmod +x) no-op on windows per python docs
-                        os.chmod(fn, (0o777 - current_umask() | 0o111))
-    finally:
-        zipfp.close()
-
-
-def untar_file(filename, location):
-    # type: (str, str) -> None
-    """
-    Untar the file (with path `filename`) to the destination `location`.
-    All files are written based on system defaults and umask (i.e. permissions
-    are not preserved), except that regular file members with any execute
-    permissions (user, group, or world) have "chmod +x" applied after being
-    written.  Note that for windows, any execute changes using os.chmod are
-    no-ops per the python docs.
-    """
-    ensure_dir(location)
-    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
-        mode = 'r:gz'
-    elif filename.lower().endswith(BZ2_EXTENSIONS):
-        mode = 'r:bz2'
-    elif filename.lower().endswith(XZ_EXTENSIONS):
-        mode = 'r:xz'
-    elif filename.lower().endswith('.tar'):
-        mode = 'r'
-    else:
-        logger.warning(
-            'Cannot determine compression type for file %s', filename,
-        )
-        mode = 'r:*'
-    tar = tarfile.open(filename, mode)
-    try:
-        leading = has_leading_dir([
-            member.name for member in tar.getmembers()
-        ])
-        for member in tar.getmembers():
-            fn = member.name
-            if leading:
-                # https://github.com/python/mypy/issues/1174
-                fn = split_leading_dir(fn)[1]  # type: ignore
-            path = os.path.join(location, fn)
-            if not is_within_directory(location, path):
-                message = (
-                    'The tar file ({}) has a file ({}) trying to install '
-                    'outside target directory ({})'
-                )
-                raise InstallationError(
-                    message.format(filename, path, location)
-                )
-            if member.isdir():
-                ensure_dir(path)
-            elif member.issym():
-                try:
-                    # https://github.com/python/typeshed/issues/2673
-                    tar._extract_member(member, path)  # type: ignore
-                except Exception as exc:
-                    # Some corrupt tar files seem to produce this
-                    # (specifically bad symlinks)
-                    logger.warning(
-                        'In the tar file %s the member %s is invalid: %s',
-                        filename, member.name, exc,
-                    )
-                    continue
-            else:
-                try:
-                    fp = tar.extractfile(member)
-                except (KeyError, AttributeError) as exc:
-                    # Some corrupt tar files seem to produce this
-                    # (specifically bad symlinks)
-                    logger.warning(
-                        'In the tar file %s the member %s is invalid: %s',
-                        filename, member.name, exc,
-                    )
-                    continue
-                ensure_dir(os.path.dirname(path))
-                with open(path, 'wb') as destfp:
-                    shutil.copyfileobj(fp, destfp)
-                fp.close()
-                # Update the timestamp (useful for cython compiled files)
-                # https://github.com/python/typeshed/issues/2673
-                tar.utime(member, path)  # type: ignore
-                # member have any execute permissions for user/group/world?
-                if member.mode & 0o111:
-                    # make dest file have execute for user/group/world
-                    # no-op on windows per python docs
-                    os.chmod(path, (0o777 - current_umask() | 0o111))
-    finally:
-        tar.close()
-
-
-def unpack_file(
-        filename,  # type: str
-        location,  # type: str
-        content_type=None,  # type: Optional[str]
-):
-    # type: (...) -> None
-    filename = os.path.realpath(filename)
-    if (
-        content_type == 'application/zip' or
-        filename.lower().endswith(ZIP_EXTENSIONS) or
-        zipfile.is_zipfile(filename)
-    ):
-        unzip_file(
-            filename,
-            location,
-            flatten=not filename.endswith('.whl')
-        )
-    elif (
-        content_type == 'application/x-gzip' or
-        tarfile.is_tarfile(filename) or
-        filename.lower().endswith(
-            TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS
-        )
-    ):
-        untar_file(filename, location)
-    else:
-        # FIXME: handle?
-        # FIXME: magic signatures?
-        logger.critical(
-            'Cannot unpack file %s (downloaded from %s, content-type: %s); '
-            'cannot detect archive format',
-            filename, location, content_type,
-        )
-        raise InstallationError(
-            'Cannot determine archive format of {}'.format(location)
-        )
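
For reference, the deleted is_within_directory() helper above is what unzip_file() and untar_file() rely on to reject path-traversal ("zip-slip") members before anything is written to disk. A minimal standalone sketch of that check, with illustrative paths:

    import os

    def is_within_directory(directory, target):
        # Resolve both paths, then require the target to share the directory as its prefix.
        abs_directory = os.path.abspath(directory)
        abs_target = os.path.abspath(target)
        return os.path.commonprefix([abs_directory, abs_target]) == abs_directory

    print(is_within_directory("/tmp/build", "/tmp/build/pkg/setup.py"))   # True
    print(is_within_directory("/tmp/build", "/tmp/build/../etc/passwd"))  # False
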
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/urls.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/urls.py
deleted file mode 100644
index 9ad40fe..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/urls.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import os
-import sys
-
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-from pip._vendor.six.moves.urllib import request as urllib_request
-
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-
-if MYPY_CHECK_RUNNING:
-    from typing import Optional, Text, Union
-
-
-def get_url_scheme(url):
-    # type: (Union[str, Text]) -> Optional[Text]
-    if ':' not in url:
-        return None
-    return url.split(':', 1)[0].lower()
-
-
-def path_to_url(path):
-    # type: (Union[str, Text]) -> str
-    """
-    Convert a path to a file: URL.  The path will be made absolute and have
-    quoted path parts.
-    """
-    path = os.path.normpath(os.path.abspath(path))
-    url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path))
-    return url
-
-
-def url_to_path(url):
-    # type: (str) -> str
-    """
-    Convert a file: URL to a path.
-    """
-    assert url.startswith('file:'), (
-        "You can only turn file: urls into filenames (not %r)" % url)
-
-    _, netloc, path, _, _ = urllib_parse.urlsplit(url)
-
-    if not netloc or netloc == 'localhost':
-        # According to RFC 8089, same as empty authority.
-        netloc = ''
-    elif sys.platform == 'win32':
-        # If we have a UNC path, prepend UNC share notation.
-        netloc = '\\\\' + netloc
-    else:
-        raise ValueError(
-            'non-local file URIs are not supported on this platform: %r'
-            % url
-        )
-
-    path = urllib_request.url2pathname(netloc + path)
-    return path
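
The deleted urls.py only wraps the six-vendored urllib; on Python 3 the same path-to-URL round trip can be sketched directly with the standard library (paths here are illustrative):

    import os
    from urllib.parse import urljoin
    from urllib.request import pathname2url, url2pathname

    def path_to_url(path):
        # Normalise and absolutise the path, then emit a percent-quoted file: URL.
        return urljoin('file:', pathname2url(os.path.abspath(os.path.normpath(path))))

    url = path_to_url('/home/amnesia/dev/mega.py')
    print(url)                                   # file:///home/amnesia/dev/mega.py
    print(url2pathname(url[len('file://'):]))    # /home/amnesia/dev/mega.py
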
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/virtualenv.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/virtualenv.py
deleted file mode 100644
index 380db1c..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/utils/virtualenv.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import os.path
-import site
-import sys
-
-
-def running_under_virtualenv():
-    # type: () -> bool
-    """
-    Return True if we're running inside a virtualenv, False otherwise.
-
-    """
-    if hasattr(sys, 'real_prefix'):
-        # pypa/virtualenv case
-        return True
-    elif sys.prefix != getattr(sys, "base_prefix", sys.prefix):
-        # PEP 405 venv
-        return True
-
-    return False
-
-
-def virtualenv_no_global():
-    # type: () -> bool
-    """
-    Return True if in a venv and no system site packages.
-    """
-    # this mirrors the logic in virtualenv.py for locating the
-    # no-global-site-packages.txt file
-    site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
-    no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
-    if running_under_virtualenv() and os.path.isfile(no_global_file):
-        return True
-    else:
-        return False
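
The virtualenv detection above boils down to two prefix checks; a condensed sketch:

    import sys

    def running_under_virtualenv():
        # Classic virtualenv stashes the original prefix in sys.real_prefix;
        # a PEP 405 venv leaves sys.base_prefix different from sys.prefix.
        return hasattr(sys, 'real_prefix') or \
            sys.prefix != getattr(sys, 'base_prefix', sys.prefix)

    print(running_under_virtualenv())
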
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/vcs/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/vcs/__init__.py
deleted file mode 100644
index 2a4eb13..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/vcs/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# Expose a limited set of classes and functions so callers outside of
-# the vcs package don't need to import deeper than `pip._internal.vcs`.
-# (The test directory and imports protected by MYPY_CHECK_RUNNING may
-# still need to import from a vcs sub-package.)
-# Import all vcs modules to register each VCS in the VcsSupport object.
-import pip._internal.vcs.bazaar
-import pip._internal.vcs.git
-import pip._internal.vcs.mercurial
-import pip._internal.vcs.subversion  # noqa: F401
-from pip._internal.vcs.versioncontrol import (  # noqa: F401
-    RemoteNotFoundError,
-    is_url,
-    make_vcs_requirement_url,
-    vcs,
-)
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/vcs/bazaar.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/vcs/bazaar.py
deleted file mode 100644
index 347c06f..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/vcs/bazaar.py
+++ /dev/null
@@ -1,120 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import logging
-import os
-
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-from pip._internal.utils.misc import display_path, rmtree
-from pip._internal.utils.subprocess import make_command
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.urls import path_to_url
-from pip._internal.vcs.versioncontrol import VersionControl, vcs
-
-if MYPY_CHECK_RUNNING:
-    from typing import Optional, Tuple
-    from pip._internal.utils.misc import HiddenText
-    from pip._internal.vcs.versioncontrol import AuthInfo, RevOptions
-
-
-logger = logging.getLogger(__name__)
-
-
-class Bazaar(VersionControl):
-    name = 'bzr'
-    dirname = '.bzr'
-    repo_name = 'branch'
-    schemes = (
-        'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp',
-        'bzr+lp',
-    )
-
-    def __init__(self, *args, **kwargs):
-        super(Bazaar, self).__init__(*args, **kwargs)
-        # This is only needed for python <2.7.5
-        # Register lp but do not expose as a scheme to support bzr+lp.
-        if getattr(urllib_parse, 'uses_fragment', None):
-            urllib_parse.uses_fragment.extend(['lp'])
-
-    @staticmethod
-    def get_base_rev_args(rev):
-        return ['-r', rev]
-
-    def export(self, location, url):
-        # type: (str, HiddenText) -> None
-        """
-        Export the Bazaar repository at the url to the destination location
-        """
-        # Remove the location to make sure Bazaar can export it correctly
-        if os.path.exists(location):
-            rmtree(location)
-
-        url, rev_options = self.get_url_rev_options(url)
-        self.run_command(
-            make_command('export', location, url, rev_options.to_args()),
-            show_stdout=False,
-        )
-
-    def fetch_new(self, dest, url, rev_options):
-        # type: (str, HiddenText, RevOptions) -> None
-        rev_display = rev_options.to_display()
-        logger.info(
-            'Checking out %s%s to %s',
-            url,
-            rev_display,
-            display_path(dest),
-        )
-        cmd_args = (
-            make_command('branch', '-q', rev_options.to_args(), url, dest)
-        )
-        self.run_command(cmd_args)
-
-    def switch(self, dest, url, rev_options):
-        # type: (str, HiddenText, RevOptions) -> None
-        self.run_command(make_command('switch', url), cwd=dest)
-
-    def update(self, dest, url, rev_options):
-        # type: (str, HiddenText, RevOptions) -> None
-        cmd_args = make_command('pull', '-q', rev_options.to_args())
-        self.run_command(cmd_args, cwd=dest)
-
-    @classmethod
-    def get_url_rev_and_auth(cls, url):
-        # type: (str) -> Tuple[str, Optional[str], AuthInfo]
-        # hotfix the URL scheme: after removing bzr+ from bzr+ssh://, re-add it
-        url, rev, user_pass = super(Bazaar, cls).get_url_rev_and_auth(url)
-        if url.startswith('ssh://'):
-            url = 'bzr+' + url
-        return url, rev, user_pass
-
-    @classmethod
-    def get_remote_url(cls, location):
-        urls = cls.run_command(['info'], show_stdout=False, cwd=location)
-        for line in urls.splitlines():
-            line = line.strip()
-            for x in ('checkout of branch: ',
-                      'parent branch: '):
-                if line.startswith(x):
-                    repo = line.split(x)[1]
-                    if cls._is_local_repository(repo):
-                        return path_to_url(repo)
-                    return repo
-        return None
-
-    @classmethod
-    def get_revision(cls, location):
-        revision = cls.run_command(
-            ['revno'], show_stdout=False, cwd=location,
-        )
-        return revision.splitlines()[-1]
-
-    @classmethod
-    def is_commit_id_equal(cls, dest, name):
-        """Always assume the versions don't match"""
-        return False
-
-
-vcs.register(Bazaar)
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/vcs/git.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/vcs/git.py
deleted file mode 100644
index 92b8457..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/vcs/git.py
+++ /dev/null
@@ -1,372 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import logging
-import os.path
-import re
-
-from pip._vendor.packaging.version import parse as parse_version
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-from pip._vendor.six.moves.urllib import request as urllib_request
-
-from pip._internal.exceptions import BadCommand
-from pip._internal.utils.misc import display_path
-from pip._internal.utils.subprocess import make_command
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.vcs.versioncontrol import (
-    RemoteNotFoundError,
-    VersionControl,
-    find_path_to_setup_from_repo_root,
-    vcs,
-)
-
-if MYPY_CHECK_RUNNING:
-    from typing import Optional, Tuple
-    from pip._internal.utils.misc import HiddenText
-    from pip._internal.vcs.versioncontrol import AuthInfo, RevOptions
-
-
-urlsplit = urllib_parse.urlsplit
-urlunsplit = urllib_parse.urlunsplit
-
-
-logger = logging.getLogger(__name__)
-
-
-HASH_REGEX = re.compile('^[a-fA-F0-9]{40}$')
-
-
-def looks_like_hash(sha):
-    return bool(HASH_REGEX.match(sha))
-
-
-class Git(VersionControl):
-    name = 'git'
-    dirname = '.git'
-    repo_name = 'clone'
-    schemes = (
-        'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
-    )
-    # Prevent the user's environment variables from interfering with pip:
-    # https://github.com/pypa/pip/issues/1130
-    unset_environ = ('GIT_DIR', 'GIT_WORK_TREE')
-    default_arg_rev = 'HEAD'
-
-    @staticmethod
-    def get_base_rev_args(rev):
-        return [rev]
-
-    def get_git_version(self):
-        VERSION_PFX = 'git version '
-        version = self.run_command(['version'], show_stdout=False)
-        if version.startswith(VERSION_PFX):
-            version = version[len(VERSION_PFX):].split()[0]
-        else:
-            version = ''
-        # Get the first 3 components of the git version, because on Windows
-        # it is x.y.z.windows.t, and that parses as a LegacyVersion, which
-        # always compares smaller than a Version.
-        version = '.'.join(version.split('.')[:3])
-        return parse_version(version)
-
-    @classmethod
-    def get_current_branch(cls, location):
-        """
-        Return the current branch, or None if HEAD isn't at a branch
-        (e.g. detached HEAD).
-        """
-        # git-symbolic-ref exits with empty stdout if "HEAD" is a detached
-        # HEAD rather than a symbolic ref.  In addition, the -q causes the
-        # command to exit with status code 1 instead of 128 in this case
-        # and to suppress the message to stderr.
-        args = ['symbolic-ref', '-q', 'HEAD']
-        output = cls.run_command(
-            args, extra_ok_returncodes=(1, ), show_stdout=False, cwd=location,
-        )
-        ref = output.strip()
-
-        if ref.startswith('refs/heads/'):
-            return ref[len('refs/heads/'):]
-
-        return None
-
-    def export(self, location, url):
-        # type: (str, HiddenText) -> None
-        """Export the Git repository at the url to the destination location"""
-        if not location.endswith('/'):
-            location = location + '/'
-
-        with TempDirectory(kind="export") as temp_dir:
-            self.unpack(temp_dir.path, url=url)
-            self.run_command(
-                ['checkout-index', '-a', '-f', '--prefix', location],
-                show_stdout=False, cwd=temp_dir.path
-            )
-
-    @classmethod
-    def get_revision_sha(cls, dest, rev):
-        """
-        Return (sha_or_none, is_branch), where sha_or_none is a commit hash
-        if the revision names a remote branch or tag, otherwise None.
-
-        Args:
-          dest: the repository directory.
-          rev: the revision name.
-        """
-        # Pass rev to pre-filter the list.
-        output = cls.run_command(['show-ref', rev], cwd=dest,
-                                 show_stdout=False, on_returncode='ignore')
-        refs = {}
-        for line in output.strip().splitlines():
-            try:
-                sha, ref = line.split()
-            except ValueError:
-                # Include the offending line to simplify troubleshooting if
-                # this error ever occurs.
-                raise ValueError('unexpected show-ref line: {!r}'.format(line))
-
-            refs[ref] = sha
-
-        branch_ref = 'refs/remotes/origin/{}'.format(rev)
-        tag_ref = 'refs/tags/{}'.format(rev)
-
-        sha = refs.get(branch_ref)
-        if sha is not None:
-            return (sha, True)
-
-        sha = refs.get(tag_ref)
-
-        return (sha, False)
-
-    @classmethod
-    def resolve_revision(cls, dest, url, rev_options):
-        # type: (str, HiddenText, RevOptions) -> RevOptions
-        """
-        Resolve a revision to a new RevOptions object with the SHA1 of the
-        branch, tag, or ref if found.
-
-        Args:
-          rev_options: a RevOptions object.
-        """
-        rev = rev_options.arg_rev
-        # The arg_rev property's implementation for Git ensures that the
-        # rev return value is always non-None.
-        assert rev is not None
-
-        sha, is_branch = cls.get_revision_sha(dest, rev)
-
-        if sha is not None:
-            rev_options = rev_options.make_new(sha)
-            rev_options.branch_name = rev if is_branch else None
-
-            return rev_options
-
-        # Do not show a warning for the common case of something that has
-        # the form of a Git commit hash.
-        if not looks_like_hash(rev):
-            logger.warning(
-                "Did not find branch or tag '%s', assuming revision or ref.",
-                rev,
-            )
-
-        if not rev.startswith('refs/'):
-            return rev_options
-
-        # If it looks like a ref, we have to fetch it explicitly.
-        cls.run_command(
-            make_command('fetch', '-q', url, rev_options.to_args()),
-            cwd=dest,
-        )
-        # Change the revision to the SHA of the ref we fetched
-        sha = cls.get_revision(dest, rev='FETCH_HEAD')
-        rev_options = rev_options.make_new(sha)
-
-        return rev_options
-
-    @classmethod
-    def is_commit_id_equal(cls, dest, name):
-        """
-        Return whether the current commit hash equals the given name.
-
-        Args:
-          dest: the repository directory.
-          name: a string name.
-        """
-        if not name:
-            # Then avoid an unnecessary subprocess call.
-            return False
-
-        return cls.get_revision(dest) == name
-
-    def fetch_new(self, dest, url, rev_options):
-        # type: (str, HiddenText, RevOptions) -> None
-        rev_display = rev_options.to_display()
-        logger.info('Cloning %s%s to %s', url, rev_display, display_path(dest))
-        self.run_command(make_command('clone', '-q', url, dest))
-
-        if rev_options.rev:
-            # Then a specific revision was requested.
-            rev_options = self.resolve_revision(dest, url, rev_options)
-            branch_name = getattr(rev_options, 'branch_name', None)
-            if branch_name is None:
-                # Only do a checkout if the current commit id doesn't match
-                # the requested revision.
-                if not self.is_commit_id_equal(dest, rev_options.rev):
-                    cmd_args = make_command(
-                        'checkout', '-q', rev_options.to_args(),
-                    )
-                    self.run_command(cmd_args, cwd=dest)
-            elif self.get_current_branch(dest) != branch_name:
-                # Then a specific branch was requested, and that branch
-                # is not yet checked out.
-                track_branch = 'origin/{}'.format(branch_name)
-                cmd_args = [
-                    'checkout', '-b', branch_name, '--track', track_branch,
-                ]
-                self.run_command(cmd_args, cwd=dest)
-
-        #: repo may contain submodules
-        self.update_submodules(dest)
-
-    def switch(self, dest, url, rev_options):
-        # type: (str, HiddenText, RevOptions) -> None
-        self.run_command(
-            make_command('config', 'remote.origin.url', url),
-            cwd=dest,
-        )
-        cmd_args = make_command('checkout', '-q', rev_options.to_args())
-        self.run_command(cmd_args, cwd=dest)
-
-        self.update_submodules(dest)
-
-    def update(self, dest, url, rev_options):
-        # type: (str, HiddenText, RevOptions) -> None
-        # First fetch changes from the default remote
-        if self.get_git_version() >= parse_version('1.9.0'):
-            # fetch tags in addition to everything else
-            self.run_command(['fetch', '-q', '--tags'], cwd=dest)
-        else:
-            self.run_command(['fetch', '-q'], cwd=dest)
-        # Then reset to wanted revision (maybe even origin/master)
-        rev_options = self.resolve_revision(dest, url, rev_options)
-        cmd_args = make_command('reset', '--hard', '-q', rev_options.to_args())
-        self.run_command(cmd_args, cwd=dest)
-        #: update submodules
-        self.update_submodules(dest)
-
-    @classmethod
-    def get_remote_url(cls, location):
-        """
-        Return URL of the first remote encountered.
-
-        Raises RemoteNotFoundError if the repository does not have a remote
-        url configured.
-        """
-        # We need to pass 1 for extra_ok_returncodes since the command
-        # exits with return code 1 if there are no matching lines.
-        stdout = cls.run_command(
-            ['config', '--get-regexp', r'remote\..*\.url'],
-            extra_ok_returncodes=(1, ), show_stdout=False, cwd=location,
-        )
-        remotes = stdout.splitlines()
-        try:
-            found_remote = remotes[0]
-        except IndexError:
-            raise RemoteNotFoundError
-
-        for remote in remotes:
-            if remote.startswith('remote.origin.url '):
-                found_remote = remote
-                break
-        url = found_remote.split(' ')[1]
-        return url.strip()
-
-    @classmethod
-    def get_revision(cls, location, rev=None):
-        if rev is None:
-            rev = 'HEAD'
-        current_rev = cls.run_command(
-            ['rev-parse', rev], show_stdout=False, cwd=location,
-        )
-        return current_rev.strip()
-
-    @classmethod
-    def get_subdirectory(cls, location):
-        """
-        Return the path to setup.py, relative to the repo root.
-        Return None if setup.py is in the repo root.
-        """
-        # find the repo root
-        git_dir = cls.run_command(
-            ['rev-parse', '--git-dir'],
-            show_stdout=False, cwd=location).strip()
-        if not os.path.isabs(git_dir):
-            git_dir = os.path.join(location, git_dir)
-        repo_root = os.path.abspath(os.path.join(git_dir, '..'))
-        return find_path_to_setup_from_repo_root(location, repo_root)
-
-    @classmethod
-    def get_url_rev_and_auth(cls, url):
-        # type: (str) -> Tuple[str, Optional[str], AuthInfo]
-        """
-        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
-        That's required because although they use SSH they sometimes don't
-        work with a ssh:// scheme (e.g. GitHub). But we need a scheme for
-        parsing. Hence we remove it again afterwards and return it as a stub.
-        """
-        # Works around an apparent Git bug
-        # (see https://article.gmane.org/gmane.comp.version-control.git/146500)
-        scheme, netloc, path, query, fragment = urlsplit(url)
-        if scheme.endswith('file'):
-            initial_slashes = path[:-len(path.lstrip('/'))]
-            newpath = (
-                initial_slashes +
-                urllib_request.url2pathname(path)
-                .replace('\\', '/').lstrip('/')
-            )
-            url = urlunsplit((scheme, netloc, newpath, query, fragment))
-            after_plus = scheme.find('+') + 1
-            url = scheme[:after_plus] + urlunsplit(
-                (scheme[after_plus:], netloc, newpath, query, fragment),
-            )
-
-        if '://' not in url:
-            assert 'file:' not in url
-            url = url.replace('git+', 'git+ssh://')
-            url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url)
-            url = url.replace('ssh://', '')
-        else:
-            url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url)
-
-        return url, rev, user_pass
-
-    @classmethod
-    def update_submodules(cls, location):
-        if not os.path.exists(os.path.join(location, '.gitmodules')):
-            return
-        cls.run_command(
-            ['submodule', 'update', '--init', '--recursive', '-q'],
-            cwd=location,
-        )
-
-    @classmethod
-    def controls_location(cls, location):
-        if super(Git, cls).controls_location(location):
-            return True
-        try:
-            r = cls.run_command(['rev-parse'],
-                                cwd=location,
-                                show_stdout=False,
-                                on_returncode='ignore',
-                                log_failed_cmd=False)
-            return not r
-        except BadCommand:
-            logger.debug("could not determine if %s is under git control "
-                         "because git is not available", location)
-            return False
-
-
-vcs.register(Git)
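
As an aside, the looks_like_hash() helper deleted above is why resolve_revision() stays quiet for full 40-character commit ids while warning about other unresolved names; a quick sketch with illustrative inputs:

    import re

    HASH_REGEX = re.compile('^[a-fA-F0-9]{40}$')

    def looks_like_hash(sha):
        # Full 40-character hex object names match; branch and tag names do not.
        return bool(HASH_REGEX.match(sha))

    print(looks_like_hash('1f2e0c1bb739790a25979f0c0809a53a0d71d1cb'))  # True
    print(looks_like_hash('v1.0.0'))                                    # False
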
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/vcs/mercurial.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/vcs/mercurial.py
deleted file mode 100644
index d9b58cf..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/vcs/mercurial.py
+++ /dev/null
@@ -1,155 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import logging
-import os
-
-from pip._vendor.six.moves import configparser
-
-from pip._internal.exceptions import BadCommand, InstallationError
-from pip._internal.utils.misc import display_path
-from pip._internal.utils.subprocess import make_command
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.urls import path_to_url
-from pip._internal.vcs.versioncontrol import (
-    VersionControl,
-    find_path_to_setup_from_repo_root,
-    vcs,
-)
-
-if MYPY_CHECK_RUNNING:
-    from pip._internal.utils.misc import HiddenText
-    from pip._internal.vcs.versioncontrol import RevOptions
-
-
-logger = logging.getLogger(__name__)
-
-
-class Mercurial(VersionControl):
-    name = 'hg'
-    dirname = '.hg'
-    repo_name = 'clone'
-    schemes = (
-        'hg', 'hg+file', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http',
-    )
-
-    @staticmethod
-    def get_base_rev_args(rev):
-        return [rev]
-
-    def export(self, location, url):
-        # type: (str, HiddenText) -> None
-        """Export the Hg repository at the url to the destination location"""
-        with TempDirectory(kind="export") as temp_dir:
-            self.unpack(temp_dir.path, url=url)
-
-            self.run_command(
-                ['archive', location], show_stdout=False, cwd=temp_dir.path
-            )
-
-    def fetch_new(self, dest, url, rev_options):
-        # type: (str, HiddenText, RevOptions) -> None
-        rev_display = rev_options.to_display()
-        logger.info(
-            'Cloning hg %s%s to %s',
-            url,
-            rev_display,
-            display_path(dest),
-        )
-        self.run_command(make_command('clone', '--noupdate', '-q', url, dest))
-        self.run_command(
-            make_command('update', '-q', rev_options.to_args()),
-            cwd=dest,
-        )
-
-    def switch(self, dest, url, rev_options):
-        # type: (str, HiddenText, RevOptions) -> None
-        repo_config = os.path.join(dest, self.dirname, 'hgrc')
-        config = configparser.RawConfigParser()
-        try:
-            config.read(repo_config)
-            config.set('paths', 'default', url.secret)
-            with open(repo_config, 'w') as config_file:
-                config.write(config_file)
-        except (OSError, configparser.NoSectionError) as exc:
-            logger.warning(
-                'Could not switch Mercurial repository to %s: %s', url, exc,
-            )
-        else:
-            cmd_args = make_command('update', '-q', rev_options.to_args())
-            self.run_command(cmd_args, cwd=dest)
-
-    def update(self, dest, url, rev_options):
-        # type: (str, HiddenText, RevOptions) -> None
-        self.run_command(['pull', '-q'], cwd=dest)
-        cmd_args = make_command('update', '-q', rev_options.to_args())
-        self.run_command(cmd_args, cwd=dest)
-
-    @classmethod
-    def get_remote_url(cls, location):
-        url = cls.run_command(
-            ['showconfig', 'paths.default'],
-            show_stdout=False, cwd=location).strip()
-        if cls._is_local_repository(url):
-            url = path_to_url(url)
-        return url.strip()
-
-    @classmethod
-    def get_revision(cls, location):
-        """
-        Return the repository-local changeset revision number, as an integer.
-        """
-        current_revision = cls.run_command(
-            ['parents', '--template={rev}'],
-            show_stdout=False, cwd=location).strip()
-        return current_revision
-
-    @classmethod
-    def get_requirement_revision(cls, location):
-        """
-        Return the changeset identification hash, as a 40-character
-        hexadecimal string
-        """
-        current_rev_hash = cls.run_command(
-            ['parents', '--template={node}'],
-            show_stdout=False, cwd=location).strip()
-        return current_rev_hash
-
-    @classmethod
-    def is_commit_id_equal(cls, dest, name):
-        """Always assume the versions don't match"""
-        return False
-
-    @classmethod
-    def get_subdirectory(cls, location):
-        """
-        Return the path to setup.py, relative to the repo root.
-        Return None if setup.py is in the repo root.
-        """
-        # find the repo root
-        repo_root = cls.run_command(
-            ['root'], show_stdout=False, cwd=location).strip()
-        if not os.path.isabs(repo_root):
-            repo_root = os.path.abspath(os.path.join(location, repo_root))
-        return find_path_to_setup_from_repo_root(location, repo_root)
-
-    @classmethod
-    def controls_location(cls, location):
-        if super(Mercurial, cls).controls_location(location):
-            return True
-        try:
-            cls.run_command(
-                ['identify'],
-                cwd=location,
-                show_stdout=False,
-                on_returncode='raise',
-                log_failed_cmd=False)
-            return True
-        except (BadCommand, InstallationError):
-            return False
-
-
-vcs.register(Mercurial)
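
Mercurial.switch() above rewrites the repository's hgrc rather than passing the new URL on the command line; a minimal sketch of that edit, assuming the [paths] section already exists (the path and URL are illustrative):

    import configparser

    def point_default_at(repo_config, url):
        # Read .hg/hgrc, repoint paths.default, and write the file back.
        config = configparser.RawConfigParser()
        config.read(repo_config)
        config.set('paths', 'default', url)
        with open(repo_config, 'w') as config_file:
            config.write(config_file)

    # point_default_at('/path/to/repo/.hg/hgrc', 'https://example.org/repo')
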
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/vcs/subversion.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/vcs/subversion.py
deleted file mode 100644
index 6c76d1a..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/vcs/subversion.py
+++ /dev/null
@@ -1,333 +0,0 @@
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import logging
-import os
-import re
-
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import (
-    display_path,
-    is_console_interactive,
-    rmtree,
-    split_auth_from_netloc,
-)
-from pip._internal.utils.subprocess import make_command
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.vcs.versioncontrol import VersionControl, vcs
-
-_svn_xml_url_re = re.compile('url="([^"]+)"')
-_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
-_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
-_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
-
-
-if MYPY_CHECK_RUNNING:
-    from typing import Optional, Tuple
-    from pip._internal.utils.subprocess import CommandArgs
-    from pip._internal.utils.misc import HiddenText
-    from pip._internal.vcs.versioncontrol import AuthInfo, RevOptions
-
-
-logger = logging.getLogger(__name__)
-
-
-class Subversion(VersionControl):
-    name = 'svn'
-    dirname = '.svn'
-    repo_name = 'checkout'
-    schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')
-
-    @classmethod
-    def should_add_vcs_url_prefix(cls, remote_url):
-        return True
-
-    @staticmethod
-    def get_base_rev_args(rev):
-        return ['-r', rev]
-
-    @classmethod
-    def get_revision(cls, location):
-        """
-        Return the maximum revision for all files under a given location
-        """
-        # Note: taken from setuptools.command.egg_info
-        revision = 0
-
-        for base, dirs, files in os.walk(location):
-            if cls.dirname not in dirs:
-                dirs[:] = []
-                continue    # no sense walking uncontrolled subdirs
-            dirs.remove(cls.dirname)
-            entries_fn = os.path.join(base, cls.dirname, 'entries')
-            if not os.path.exists(entries_fn):
-                # FIXME: should we warn?
-                continue
-
-            dirurl, localrev = cls._get_svn_url_rev(base)
-
-            if base == location:
-                base = dirurl + '/'   # save the root url
-            elif not dirurl or not dirurl.startswith(base):
-                dirs[:] = []
-                continue    # not part of the same svn tree, skip it
-            revision = max(revision, localrev)
-        return revision
-
-    @classmethod
-    def get_netloc_and_auth(cls, netloc, scheme):
-        """
-        This override allows the auth information to be passed to svn via the
-        --username and --password options instead of via the URL.
-        """
-        if scheme == 'ssh':
-            # The --username and --password options can't be used for
-            # svn+ssh URLs, so keep the auth information in the URL.
-            return super(Subversion, cls).get_netloc_and_auth(netloc, scheme)
-
-        return split_auth_from_netloc(netloc)
-
-    @classmethod
-    def get_url_rev_and_auth(cls, url):
-        # type: (str) -> Tuple[str, Optional[str], AuthInfo]
-        # hotfix the URL scheme: after removing svn+ from svn+ssh://, re-add it
-        url, rev, user_pass = super(Subversion, cls).get_url_rev_and_auth(url)
-        if url.startswith('ssh://'):
-            url = 'svn+' + url
-        return url, rev, user_pass
-
-    @staticmethod
-    def make_rev_args(username, password):
-        # type: (Optional[str], Optional[HiddenText]) -> CommandArgs
-        extra_args = []  # type: CommandArgs
-        if username:
-            extra_args += ['--username', username]
-        if password:
-            extra_args += ['--password', password]
-
-        return extra_args
-
-    @classmethod
-    def get_remote_url(cls, location):
-        # In cases where the source is in a subdirectory, not alongside
-        # setup.py we have to look up in the location until we find a real
-        # setup.py
-        orig_location = location
-        while not os.path.exists(os.path.join(location, 'setup.py')):
-            last_location = location
-            location = os.path.dirname(location)
-            if location == last_location:
-                # We've traversed up to the root of the filesystem without
-                # finding setup.py
-                logger.warning(
-                    "Could not find setup.py for directory %s (tried all "
-                    "parent directories)",
-                    orig_location,
-                )
-                return None
-
-        return cls._get_svn_url_rev(location)[0]
-
-    @classmethod
-    def _get_svn_url_rev(cls, location):
-        from pip._internal.exceptions import InstallationError
-
-        entries_path = os.path.join(location, cls.dirname, 'entries')
-        if os.path.exists(entries_path):
-            with open(entries_path) as f:
-                data = f.read()
-        else:  # subversion >= 1.7 does not have the 'entries' file
-            data = ''
-
-        if (data.startswith('8') or
-                data.startswith('9') or
-                data.startswith('10')):
-            data = list(map(str.splitlines, data.split('\n\x0c\n')))
-            del data[0][0]  # get rid of the '8'
-            url = data[0][3]
-            revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
-        elif data.startswith('<?xml'):
-            match = _svn_xml_url_re.search(data)
-            if not match:
-                raise ValueError(
-                    'Badly formatted data: {data!r}'.format(**locals()))
-            url = match.group(1)    # get repository URL
-            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
-        else:
-            try:
-                # subversion >= 1.7
-                # Note that using get_remote_call_options is not necessary here
-                # because `svn info` is being run against a local directory.
-                # We don't need to worry about making sure interactive mode
-                # is being used to prompt for passwords, because passwords
-                # are only potentially needed for remote server requests.
-                xml = cls.run_command(
-                    ['info', '--xml', location],
-                    show_stdout=False,
-                )
-                url = _svn_info_xml_url_re.search(xml).group(1)
-                revs = [
-                    int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
-                ]
-            except InstallationError:
-                url, revs = None, []
-
-        if revs:
-            rev = max(revs)
-        else:
-            rev = 0
-
-        return url, rev
-
-    @classmethod
-    def is_commit_id_equal(cls, dest, name):
-        """Always assume the versions don't match"""
-        return False
-
-    def __init__(self, use_interactive=None):
-        # type: (bool) -> None
-        if use_interactive is None:
-            use_interactive = is_console_interactive()
-        self.use_interactive = use_interactive
-
-        # This member is used to cache the fetched version of the current
-        # ``svn`` client.
-        # Special value definitions:
-        #   None: Not evaluated yet.
-        #   Empty tuple: Could not parse version.
-        self._vcs_version = None  # type: Optional[Tuple[int, ...]]
-
-        super(Subversion, self).__init__()
-
-    def call_vcs_version(self):
-        # type: () -> Tuple[int, ...]
-        """Query the version of the currently installed Subversion client.
-
-        :return: A tuple containing the parts of the version information or
-            ``()`` if the version returned from ``svn`` could not be parsed.
-        :raises: BadCommand: If ``svn`` is not installed.
-        """
-        # Example versions:
-        #   svn, version 1.10.3 (r1842928)
-        #      compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0
-        #   svn, version 1.7.14 (r1542130)
-        #      compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu
-        version_prefix = 'svn, version '
-        version = self.run_command(['--version'], show_stdout=False)
-        if not version.startswith(version_prefix):
-            return ()
-
-        version = version[len(version_prefix):].split()[0]
-        version_list = version.split('.')
-        try:
-            parsed_version = tuple(map(int, version_list))
-        except ValueError:
-            return ()
-
-        return parsed_version
-
-    def get_vcs_version(self):
-        # type: () -> Tuple[int, ...]
-        """Return the version of the currently installed Subversion client.
-
-        If the version of the Subversion client has already been queried,
-        a cached value will be used.
-
-        :return: A tuple containing the parts of the version information or
-            ``()`` if the version returned from ``svn`` could not be parsed.
-        :raises: BadCommand: If ``svn`` is not installed.
-        """
-        if self._vcs_version is not None:
-            # Use cached version, if available.
-            # If parsing the version failed previously (empty tuple),
-            # do not attempt to parse it again.
-            return self._vcs_version
-
-        vcs_version = self.call_vcs_version()
-        self._vcs_version = vcs_version
-        return vcs_version
-
-    def get_remote_call_options(self):
-        # type: () -> CommandArgs
-        """Return options to be used on calls to Subversion that contact the server.
-
-        These options are applicable for the following ``svn`` subcommands used
-        in this class.
-
-            - checkout
-            - export
-            - switch
-            - update
-
-        :return: A list of command line arguments to pass to ``svn``.
-        """
-        if not self.use_interactive:
-            # --non-interactive switch is available since Subversion 0.14.4.
-            # Subversion < 1.8 runs in interactive mode by default.
-            return ['--non-interactive']
-
-        svn_version = self.get_vcs_version()
-        # By default, Subversion >= 1.8 runs in non-interactive mode if
-        # stdin is not a TTY. Since that is how pip invokes SVN, in
-        # call_subprocess(), pip must pass --force-interactive to ensure
-        # the user can be prompted for a password, if required.
-        #   SVN added the --force-interactive option in SVN 1.8. Since
-        # e.g. RHEL/CentOS 7, which is supported until 2024, ships with
-        # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip
-        # can't safely add the option if the SVN version is < 1.8 (or unknown).
-        if svn_version >= (1, 8):
-            return ['--force-interactive']
-
-        return []
-
-    def export(self, location, url):
-        # type: (str, HiddenText) -> None
-        """Export the svn repository at the url to the destination location"""
-        url, rev_options = self.get_url_rev_options(url)
-
-        logger.info('Exporting svn repository %s to %s', url, location)
-        with indent_log():
-            if os.path.exists(location):
-                # Subversion doesn't like to check out over an existing
-                # directory --force fixes this, but was only added in svn 1.5
-                rmtree(location)
-            cmd_args = make_command(
-                'export', self.get_remote_call_options(),
-                rev_options.to_args(), url, location,
-            )
-            self.run_command(cmd_args, show_stdout=False)
-
-    def fetch_new(self, dest, url, rev_options):
-        # type: (str, HiddenText, RevOptions) -> None
-        rev_display = rev_options.to_display()
-        logger.info(
-            'Checking out %s%s to %s',
-            url,
-            rev_display,
-            display_path(dest),
-        )
-        cmd_args = make_command(
-            'checkout', '-q', self.get_remote_call_options(),
-            rev_options.to_args(), url, dest,
-        )
-        self.run_command(cmd_args)
-
-    def switch(self, dest, url, rev_options):
-        # type: (str, HiddenText, RevOptions) -> None
-        cmd_args = make_command(
-            'switch', self.get_remote_call_options(), rev_options.to_args(),
-            url, dest,
-        )
-        self.run_command(cmd_args)
-
-    def update(self, dest, url, rev_options):
-        # type: (str, HiddenText, RevOptions) -> None
-        cmd_args = make_command(
-            'update', self.get_remote_call_options(), rev_options.to_args(),
-            dest,
-        )
-        self.run_command(cmd_args)
-
-
-vcs.register(Subversion)
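
The call_vcs_version() parsing above is small enough to demonstrate in isolation; a sketch with sample banner strings:

    def parse_svn_version(output):
        # Pull "1.10.3" out of "svn, version 1.10.3 (r1842928)" as an int tuple,
        # or return () when the banner cannot be parsed.
        prefix = 'svn, version '
        if not output.startswith(prefix):
            return ()
        try:
            return tuple(map(int, output[len(prefix):].split()[0].split('.')))
        except ValueError:
            return ()

    print(parse_svn_version('svn, version 1.10.3 (r1842928)'))  # (1, 10, 3)
    print(parse_svn_version('unexpected output'))               # ()
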
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/vcs/versioncontrol.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/vcs/versioncontrol.py
deleted file mode 100644
index 9038ace..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/vcs/versioncontrol.py
+++ /dev/null
@@ -1,665 +0,0 @@
-"""Handles all VCS (version control) support"""
-
-# The following comment should be removed at some point in the future.
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import errno
-import logging
-import os
-import shutil
-import sys
-
-from pip._vendor import pkg_resources
-from pip._vendor.six.moves.urllib import parse as urllib_parse
-
-from pip._internal.exceptions import BadCommand
-from pip._internal.utils.compat import samefile
-from pip._internal.utils.misc import (
-    ask_path_exists,
-    backup_dir,
-    display_path,
-    hide_url,
-    hide_value,
-    rmtree,
-)
-from pip._internal.utils.subprocess import call_subprocess, make_command
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.urls import get_url_scheme
-
-if MYPY_CHECK_RUNNING:
-    from typing import (
-        Any, Dict, Iterable, List, Mapping, Optional, Text, Tuple, Type, Union
-    )
-    from pip._internal.utils.ui import SpinnerInterface
-    from pip._internal.utils.misc import HiddenText
-    from pip._internal.utils.subprocess import CommandArgs
-
-    AuthInfo = Tuple[Optional[str], Optional[str]]
-
-
-__all__ = ['vcs']
-
-
-logger = logging.getLogger(__name__)
-
-
-def is_url(name):
-    # type: (Union[str, Text]) -> bool
-    """
-    Return true if the name looks like a URL.
-    """
-    scheme = get_url_scheme(name)
-    if scheme is None:
-        return False
-    return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
-
-
-def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None):
-    """
-    Return the URL for a VCS requirement.
-
-    Args:
-      repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+").
-      project_name: the (unescaped) project name.
-    """
-    egg_project_name = pkg_resources.to_filename(project_name)
-    req = '{}@{}#egg={}'.format(repo_url, rev, egg_project_name)
-    if subdir:
-        req += '&subdirectory={}'.format(subdir)
-
-    return req
-
-
-def find_path_to_setup_from_repo_root(location, repo_root):
-    """
-    Find the path to `setup.py` by searching up the filesystem from `location`.
-    Return the path to `setup.py` relative to `repo_root`.
-    Return None if `setup.py` is in `repo_root` or cannot be found.
-    """
-    # find setup.py
-    orig_location = location
-    while not os.path.exists(os.path.join(location, 'setup.py')):
-        last_location = location
-        location = os.path.dirname(location)
-        if location == last_location:
-            # We've traversed up to the root of the filesystem without
-            # finding setup.py
-            logger.warning(
-                "Could not find setup.py for directory %s (tried all "
-                "parent directories)",
-                orig_location,
-            )
-            return None
-
-    if samefile(repo_root, location):
-        return None
-
-    return os.path.relpath(location, repo_root)
-
-
-class RemoteNotFoundError(Exception):
-    pass
-
-
-class RevOptions(object):
-
-    """
-    Encapsulates a VCS-specific revision to install, along with any VCS
-    install options.
-
-    Instances of this class should be treated as if immutable.
-    """
-
-    def __init__(
-        self,
-        vc_class,  # type: Type[VersionControl]
-        rev=None,  # type: Optional[str]
-        extra_args=None,  # type: Optional[CommandArgs]
-    ):
-        # type: (...) -> None
-        """
-        Args:
-          vc_class: a VersionControl subclass.
-          rev: the name of the revision to install.
-          extra_args: a list of extra options.
-        """
-        if extra_args is None:
-            extra_args = []
-
-        self.extra_args = extra_args
-        self.rev = rev
-        self.vc_class = vc_class
-        self.branch_name = None  # type: Optional[str]
-
-    def __repr__(self):
-        return '<RevOptions {}: rev={!r}>'.format(self.vc_class.name, self.rev)
-
-    @property
-    def arg_rev(self):
-        # type: () -> Optional[str]
-        if self.rev is None:
-            return self.vc_class.default_arg_rev
-
-        return self.rev
-
-    def to_args(self):
-        # type: () -> CommandArgs
-        """
-        Return the VCS-specific command arguments.
-        """
-        args = []  # type: CommandArgs
-        rev = self.arg_rev
-        if rev is not None:
-            args += self.vc_class.get_base_rev_args(rev)
-        args += self.extra_args
-
-        return args
-
-    def to_display(self):
-        # type: () -> str
-        if not self.rev:
-            return ''
-
-        return ' (to revision {})'.format(self.rev)
-
-    def make_new(self, rev):
-        # type: (str) -> RevOptions
-        """
-        Make a copy of the current instance, but with a new rev.
-
-        Args:
-          rev: the name of the revision for the new object.
-        """
-        return self.vc_class.make_rev_options(rev, extra_args=self.extra_args)
-
-
-class VcsSupport(object):
-    _registry = {}  # type: Dict[str, VersionControl]
-    schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']
-
-    def __init__(self):
-        # type: () -> None
-        # Register more schemes with urlparse for various version control
-        # systems
-        urllib_parse.uses_netloc.extend(self.schemes)
-        # Python >= 2.7.4, 3.3 doesn't have uses_fragment
-        if getattr(urllib_parse, 'uses_fragment', None):
-            urllib_parse.uses_fragment.extend(self.schemes)
-        super(VcsSupport, self).__init__()
-
-    def __iter__(self):
-        return self._registry.__iter__()
-
-    @property
-    def backends(self):
-        # type: () -> List[VersionControl]
-        return list(self._registry.values())
-
-    @property
-    def dirnames(self):
-        # type: () -> List[str]
-        return [backend.dirname for backend in self.backends]
-
-    @property
-    def all_schemes(self):
-        # type: () -> List[str]
-        schemes = []  # type: List[str]
-        for backend in self.backends:
-            schemes.extend(backend.schemes)
-        return schemes
-
-    def register(self, cls):
-        # type: (Type[VersionControl]) -> None
-        if not hasattr(cls, 'name'):
-            logger.warning('Cannot register VCS %s', cls.__name__)
-            return
-        if cls.name not in self._registry:
-            self._registry[cls.name] = cls()
-            logger.debug('Registered VCS backend: %s', cls.name)
-
-    def unregister(self, name):
-        # type: (str) -> None
-        if name in self._registry:
-            del self._registry[name]
-
-    def get_backend_for_dir(self, location):
-        # type: (str) -> Optional[VersionControl]
-        """
-        Return a VersionControl object if a repository of that type is found
-        at the given directory.
-        """
-        for vcs_backend in self._registry.values():
-            if vcs_backend.controls_location(location):
-                logger.debug('Determined that %s uses VCS: %s',
-                             location, vcs_backend.name)
-                return vcs_backend
-        return None
-
-    def get_backend(self, name):
-        # type: (str) -> Optional[VersionControl]
-        """
-        Return a VersionControl object or None.
-        """
-        name = name.lower()
-        return self._registry.get(name)
-
-
-vcs = VcsSupport()
-
-
-class VersionControl(object):
-    name = ''
-    dirname = ''
-    repo_name = ''
-    # List of supported schemes for this Version Control
-    schemes = ()  # type: Tuple[str, ...]
-    # Iterable of environment variable names to pass to call_subprocess().
-    unset_environ = ()  # type: Tuple[str, ...]
-    default_arg_rev = None  # type: Optional[str]
-
-    @classmethod
-    def should_add_vcs_url_prefix(cls, remote_url):
-        """
-        Return whether the vcs prefix (e.g. "git+") should be added to a
-        repository's remote url when used in a requirement.
-        """
-        return not remote_url.lower().startswith('{}:'.format(cls.name))
-
-    @classmethod
-    def get_subdirectory(cls, location):
-        """
-        Return the path to setup.py, relative to the repo root.
-        Return None if setup.py is in the repo root.
-        """
-        return None
-
-    @classmethod
-    def get_requirement_revision(cls, repo_dir):
-        """
-        Return the revision string that should be used in a requirement.
-        """
-        return cls.get_revision(repo_dir)
-
-    @classmethod
-    def get_src_requirement(cls, repo_dir, project_name):
-        """
-        Return the requirement string to use to redownload the files
-        currently at the given repository directory.
-
-        Args:
-          project_name: the (unescaped) project name.
-
-        The return value has a form similar to the following:
-
-            {repository_url}@{revision}#egg={project_name}
-        """
-        repo_url = cls.get_remote_url(repo_dir)
-        if repo_url is None:
-            return None
-
-        if cls.should_add_vcs_url_prefix(repo_url):
-            repo_url = '{}+{}'.format(cls.name, repo_url)
-
-        revision = cls.get_requirement_revision(repo_dir)
-        subdir = cls.get_subdirectory(repo_dir)
-        req = make_vcs_requirement_url(repo_url, revision, project_name,
-                                       subdir=subdir)
-
-        return req
-
-    @staticmethod
-    def get_base_rev_args(rev):
-        """
-        Return the base revision arguments for a vcs command.
-
-        Args:
-          rev: the name of a revision to install.  Cannot be None.
-        """
-        raise NotImplementedError
-
-    @classmethod
-    def make_rev_options(cls, rev=None, extra_args=None):
-        # type: (Optional[str], Optional[CommandArgs]) -> RevOptions
-        """
-        Return a RevOptions object.
-
-        Args:
-          rev: the name of a revision to install.
-          extra_args: a list of extra options.
-        """
-        return RevOptions(cls, rev, extra_args=extra_args)
-
-    @classmethod
-    def _is_local_repository(cls, repo):
-        # type: (str) -> bool
-        """
-           posix absolute paths start with os.path.sep,
-           win32 ones start with drive (like c:\\folder)
-        """
-        drive, tail = os.path.splitdrive(repo)
-        return repo.startswith(os.path.sep) or bool(drive)
-
-    def export(self, location, url):
-        # type: (str, HiddenText) -> None
-        """
-        Export the repository at the url to the destination location
-        i.e. only download the files, without vcs information
-
-        :param url: the repository URL starting with a vcs prefix.
-        """
-        raise NotImplementedError
-
-    @classmethod
-    def get_netloc_and_auth(cls, netloc, scheme):
-        """
-        Parse the repository URL's netloc, and return the new netloc to use
-        along with auth information.
-
-        Args:
-          netloc: the original repository URL netloc.
-          scheme: the repository URL's scheme without the vcs prefix.
-
-        This is mainly for the Subversion class to override, so that auth
-        information can be provided via the --username and --password options
-        instead of through the URL.  For other subclasses like Git without
-        such an option, auth information must stay in the URL.
-
-        Returns: (netloc, (username, password)).
-        """
-        return netloc, (None, None)
-
-    @classmethod
-    def get_url_rev_and_auth(cls, url):
-        # type: (str) -> Tuple[str, Optional[str], AuthInfo]
-        """
-        Parse the repository URL to use, and return the URL, revision,
-        and auth info to use.
-
-        Returns: (url, rev, (username, password)).
-        """
-        scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
-        if '+' not in scheme:
-            raise ValueError(
-                "Sorry, {!r} is a malformed VCS url. "
-                "The format is +://, "
-                "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url)
-            )
-        # Remove the vcs prefix.
-        scheme = scheme.split('+', 1)[1]
-        netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme)
-        rev = None
-        if '@' in path:
-            path, rev = path.rsplit('@', 1)
-        url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
-        return url, rev, user_pass
-
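To make the parsing above concrete, here is a minimal standalone sketch of the same steps (strip the vcs prefix from the scheme, split a trailing @-revision off the path), using Python 3's urllib.parse rather than the six shim and a made-up example URL:

    from urllib.parse import urlsplit, urlunsplit

    def split_vcs_url(url):
        # url is a made-up example, e.g. 'git+https://example.com/repo.git@v1.0'
        scheme, netloc, path, query, frag = urlsplit(url)
        if '+' not in scheme:
            raise ValueError('not a <vcs>+<protocol> URL: {!r}'.format(url))
        scheme = scheme.split('+', 1)[1]      # drop the 'git' / 'svn' / ... prefix
        rev = None
        if '@' in path:
            path, rev = path.rsplit('@', 1)   # text after the last '@' is the revision
        return urlunsplit((scheme, netloc, path, query, '')), rev

    print(split_vcs_url('git+https://example.com/repo.git@v1.0'))
    # -> ('https://example.com/repo.git', 'v1.0')
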
-    @staticmethod
-    def make_rev_args(username, password):
-        # type: (Optional[str], Optional[HiddenText]) -> CommandArgs
-        """
-        Return the RevOptions "extra arguments" to use in obtain().
-        """
-        return []
-
-    def get_url_rev_options(self, url):
-        # type: (HiddenText) -> Tuple[HiddenText, RevOptions]
-        """
-        Return the URL and RevOptions object to use in obtain() and in
-        some cases export(), as a tuple (url, rev_options).
-        """
-        secret_url, rev, user_pass = self.get_url_rev_and_auth(url.secret)
-        username, secret_password = user_pass
-        password = None  # type: Optional[HiddenText]
-        if secret_password is not None:
-            password = hide_value(secret_password)
-        extra_args = self.make_rev_args(username, password)
-        rev_options = self.make_rev_options(rev, extra_args=extra_args)
-
-        return hide_url(secret_url), rev_options
-
-    @staticmethod
-    def normalize_url(url):
-        # type: (str) -> str
-        """
-        Normalize a URL for comparison by unquoting it and removing any
-        trailing slash.
-        """
-        return urllib_parse.unquote(url).rstrip('/')
-
-    @classmethod
-    def compare_urls(cls, url1, url2):
-        # type: (str, str) -> bool
-        """
-        Compare two repo URLs for identity, ignoring incidental differences.
-        """
-        return (cls.normalize_url(url1) == cls.normalize_url(url2))
-
-    def fetch_new(self, dest, url, rev_options):
-        # type: (str, HiddenText, RevOptions) -> None
-        """
-        Fetch a revision from a repository, in the case that this is the
-        first fetch from the repository.
-
-        Args:
-          dest: the directory to fetch the repository to.
-          rev_options: a RevOptions object.
-        """
-        raise NotImplementedError
-
-    def switch(self, dest, url, rev_options):
-        # type: (str, HiddenText, RevOptions) -> None
-        """
-        Switch the repo at ``dest`` to point to ``URL``.
-
-        Args:
-          rev_options: a RevOptions object.
-        """
-        raise NotImplementedError
-
-    def update(self, dest, url, rev_options):
-        # type: (str, HiddenText, RevOptions) -> None
-        """
-        Update an already-existing repo to the given ``rev_options``.
-
-        Args:
-          rev_options: a RevOptions object.
-        """
-        raise NotImplementedError
-
-    @classmethod
-    def is_commit_id_equal(cls, dest, name):
-        """
-        Return whether the id of the current commit equals the given name.
-
-        Args:
-          dest: the repository directory.
-          name: a string name.
-        """
-        raise NotImplementedError
-
-    def obtain(self, dest, url):
-        # type: (str, HiddenText) -> None
-        """
-        Install or update in editable mode the package represented by this
-        VersionControl object.
-
-        :param dest: the repository directory in which to install or update.
-        :param url: the repository URL starting with a vcs prefix.
-        """
-        url, rev_options = self.get_url_rev_options(url)
-
-        if not os.path.exists(dest):
-            self.fetch_new(dest, url, rev_options)
-            return
-
-        rev_display = rev_options.to_display()
-        if self.is_repository_directory(dest):
-            existing_url = self.get_remote_url(dest)
-            if self.compare_urls(existing_url, url.secret):
-                logger.debug(
-                    '%s in %s exists, and has correct URL (%s)',
-                    self.repo_name.title(),
-                    display_path(dest),
-                    url,
-                )
-                if not self.is_commit_id_equal(dest, rev_options.rev):
-                    logger.info(
-                        'Updating %s %s%s',
-                        display_path(dest),
-                        self.repo_name,
-                        rev_display,
-                    )
-                    self.update(dest, url, rev_options)
-                else:
-                    logger.info('Skipping because already up-to-date.')
-                return
-
-            logger.warning(
-                '%s %s in %s exists with URL %s',
-                self.name,
-                self.repo_name,
-                display_path(dest),
-                existing_url,
-            )
-            prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
-                      ('s', 'i', 'w', 'b'))
-        else:
-            logger.warning(
-                'Directory %s already exists, and is not a %s %s.',
-                dest,
-                self.name,
-                self.repo_name,
-            )
-            # https://github.com/python/mypy/issues/1174
-            prompt = ('(i)gnore, (w)ipe, (b)ackup ',  # type: ignore
-                      ('i', 'w', 'b'))
-
-        logger.warning(
-            'The plan is to install the %s repository %s',
-            self.name,
-            url,
-        )
-        response = ask_path_exists('What to do?  %s' % prompt[0], prompt[1])
-
-        if response == 'a':
-            sys.exit(-1)
-
-        if response == 'w':
-            logger.warning('Deleting %s', display_path(dest))
-            rmtree(dest)
-            self.fetch_new(dest, url, rev_options)
-            return
-
-        if response == 'b':
-            dest_dir = backup_dir(dest)
-            logger.warning(
-                'Backing up %s to %s', display_path(dest), dest_dir,
-            )
-            shutil.move(dest, dest_dir)
-            self.fetch_new(dest, url, rev_options)
-            return
-
-        # Do nothing if the response is "i".
-        if response == 's':
-            logger.info(
-                'Switching %s %s to %s%s',
-                self.repo_name,
-                display_path(dest),
-                url,
-                rev_display,
-            )
-            self.switch(dest, url, rev_options)
-
-    def unpack(self, location, url):
-        # type: (str, HiddenText) -> None
-        """
-        Clean up the current location and download the url repository
-        (and vcs info) into location.
-
-        :param url: the repository URL starting with a vcs prefix.
-        """
-        if os.path.exists(location):
-            rmtree(location)
-        self.obtain(location, url=url)
-
-    @classmethod
-    def get_remote_url(cls, location):
-        """
-        Return the url used at location
-
-        Raises RemoteNotFoundError if the repository does not have a remote
-        url configured.
-        """
-        raise NotImplementedError
-
-    @classmethod
-    def get_revision(cls, location):
-        """
-        Return the current commit id of the files at the given location.
-        """
-        raise NotImplementedError
-
-    @classmethod
-    def run_command(
-        cls,
-        cmd,  # type: Union[List[str], CommandArgs]
-        show_stdout=True,  # type: bool
-        cwd=None,  # type: Optional[str]
-        on_returncode='raise',  # type: str
-        extra_ok_returncodes=None,  # type: Optional[Iterable[int]]
-        command_desc=None,  # type: Optional[str]
-        extra_environ=None,  # type: Optional[Mapping[str, Any]]
-        spinner=None,  # type: Optional[SpinnerInterface]
-        log_failed_cmd=True
-    ):
-        # type: (...) -> Text
-        """
-        Run a VCS subcommand.
-        This is simply a wrapper around call_subprocess that adds the VCS
-        command name and checks that the VCS is available.
-        """
-        cmd = make_command(cls.name, *cmd)
-        try:
-            return call_subprocess(cmd, show_stdout, cwd,
-                                   on_returncode=on_returncode,
-                                   extra_ok_returncodes=extra_ok_returncodes,
-                                   command_desc=command_desc,
-                                   extra_environ=extra_environ,
-                                   unset_environ=cls.unset_environ,
-                                   spinner=spinner,
-                                   log_failed_cmd=log_failed_cmd)
-        except OSError as e:
-            # errno.ENOENT = no such file or directory
-            # In other words, the VCS executable isn't available
-            if e.errno == errno.ENOENT:
-                raise BadCommand(
-                    'Cannot find command %r - do you have '
-                    '%r installed and in your '
-                    'PATH?' % (cls.name, cls.name))
-            else:
-                raise  # re-raise exception if a different error occurred
-
-    @classmethod
-    def is_repository_directory(cls, path):
-        # type: (str) -> bool
-        """
-        Return whether a directory path is a repository directory.
-        """
-        logger.debug('Checking in %s for %s (%s)...',
-                     path, cls.dirname, cls.name)
-        return os.path.exists(os.path.join(path, cls.dirname))
-
-    @classmethod
-    def controls_location(cls, location):
-        # type: (str) -> bool
-        """
-        Check if a location is controlled by the vcs.
-        It is meant to be overridden to implement smarter detection
-        mechanisms for specific vcs.
-
-        This can do more than is_repository_directory() alone.  For example,
-        the Git override checks that Git is actually available.
-        """
-        return cls.is_repository_directory(location)
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/wheel.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/wheel.py
deleted file mode 100644
index 8f9778c..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_internal/wheel.py
+++ /dev/null
@@ -1,1181 +0,0 @@
-"""
-Support for installing and building the "wheel" binary package format.
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-# mypy: disallow-untyped-defs=False
-
-from __future__ import absolute_import
-
-import collections
-import compileall
-import csv
-import hashlib
-import logging
-import os.path
-import re
-import shutil
-import stat
-import sys
-import warnings
-from base64 import urlsafe_b64encode
-from email.parser import Parser
-
-from pip._vendor import pkg_resources
-from pip._vendor.distlib.scripts import ScriptMaker
-from pip._vendor.distlib.util import get_export_entry
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.six import StringIO
-
-from pip._internal import pep425tags
-from pip._internal.exceptions import (
-    InstallationError,
-    InvalidWheelFilename,
-    UnsupportedWheel,
-)
-from pip._internal.locations import distutils_scheme, get_major_minor_version
-from pip._internal.models.link import Link
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.marker_files import has_delete_marker_file
-from pip._internal.utils.misc import captured_stdout, ensure_dir, read_chunks
-from pip._internal.utils.setuptools_build import make_setuptools_shim_args
-from pip._internal.utils.subprocess import (
-    LOG_DIVIDER,
-    call_subprocess,
-    format_command_args,
-    runner_with_spinner_message,
-)
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.typing import MYPY_CHECK_RUNNING
-from pip._internal.utils.ui import open_spinner
-from pip._internal.utils.unpacking import unpack_file
-from pip._internal.utils.urls import path_to_url
-
-if MYPY_CHECK_RUNNING:
-    from typing import (
-        Dict, List, Optional, Sequence, Mapping, Tuple, IO, Text, Any,
-        Iterable, Callable, Set,
-    )
-    from pip._vendor.packaging.requirements import Requirement
-    from pip._internal.req.req_install import InstallRequirement
-    from pip._internal.operations.prepare import (
-        RequirementPreparer
-    )
-    from pip._internal.cache import WheelCache
-    from pip._internal.pep425tags import Pep425Tag
-
-    InstalledCSVRow = Tuple[str, ...]
-
-    BinaryAllowedPredicate = Callable[[InstallRequirement], bool]
-
-
-VERSION_COMPATIBLE = (1, 0)
-
-
-logger = logging.getLogger(__name__)
-
-
-def normpath(src, p):
-    return os.path.relpath(src, p).replace(os.path.sep, '/')
-
-
-def hash_file(path, blocksize=1 << 20):
-    # type: (str, int) -> Tuple[Any, int]
-    """Return (hash, length) for path using hashlib.sha256()"""
-    h = hashlib.sha256()
-    length = 0
-    with open(path, 'rb') as f:
-        for block in read_chunks(f, size=blocksize):
-            length += len(block)
-            h.update(block)
-    return (h, length)  # type: ignore
-
-
-def rehash(path, blocksize=1 << 20):
-    # type: (str, int) -> Tuple[str, str]
-    """Return (encoded_digest, length) for path using hashlib.sha256()"""
-    h, length = hash_file(path, blocksize)
-    digest = 'sha256=' + urlsafe_b64encode(
-        h.digest()
-    ).decode('latin1').rstrip('=')
-    # unicode/str python2 issues
-    return (digest, str(length))  # type: ignore
-
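A small self-contained illustration of the digest format produced by rehash() (sha256, urlsafe base64, '=' padding stripped), hashing an in-memory byte string rather than a file:

    import hashlib
    from base64 import urlsafe_b64encode

    def record_digest(data):
        # Same encoding RECORD uses: 'sha256=' + urlsafe base64 digest, no padding.
        digest = hashlib.sha256(data).digest()
        return 'sha256=' + urlsafe_b64encode(digest).decode('latin1').rstrip('=')

    print(record_digest(b'hello world'))
    # -> sha256=uU0nuZNNPgilLlLX2n2r-sSE7-N6U4DukIj3rOLvzek
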
-
-def open_for_csv(name, mode):
-    # type: (str, Text) -> IO
-    if sys.version_info[0] < 3:
-        nl = {}  # type: Dict[str, Any]
-        bin = 'b'
-    else:
-        nl = {'newline': ''}  # type: Dict[str, Any]
-        bin = ''
-    return open(name, mode + bin, **nl)
-
-
-def replace_python_tag(wheelname, new_tag):
-    # type: (str, str) -> str
-    """Replace the Python tag in a wheel file name with a new value.
-    """
-    parts = wheelname.split('-')
-    parts[-3] = new_tag
-    return '-'.join(parts)
-
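For example, the python tag is the third dash-separated field from the end of the filename (the wheel name below is hypothetical):

    name = 'foo-1.0-py2.py3-none-any.whl'   # hypothetical wheel filename
    parts = name.split('-')
    parts[-3] = 'cp37'                      # swap the python tag
    print('-'.join(parts))                  # -> foo-1.0-cp37-none-any.whl
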
-
-def fix_script(path):
-    # type: (str) -> Optional[bool]
-    """Replace #!python with #!/path/to/python
-    Return True if file was changed."""
-    # XXX RECORD hashes will need to be updated
-    if os.path.isfile(path):
-        with open(path, 'rb') as script:
-            firstline = script.readline()
-            if not firstline.startswith(b'#!python'):
-                return False
-            exename = sys.executable.encode(sys.getfilesystemencoding())
-            firstline = b'#!' + exename + os.linesep.encode("ascii")
-            rest = script.read()
-        with open(path, 'wb') as script:
-            script.write(firstline)
-            script.write(rest)
-        return True
-    return None
-
-
-dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?)
-                                \.dist-info$""", re.VERBOSE)
-
-
-def root_is_purelib(name, wheeldir):
-    # type: (str, str) -> bool
-    """
-    Return True if the extracted wheel in wheeldir should go into purelib.
-    """
-    name_folded = name.replace("-", "_")
-    for item in os.listdir(wheeldir):
-        match = dist_info_re.match(item)
-        if match and match.group('name') == name_folded:
-            with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel:
-                for line in wheel:
-                    line = line.lower().rstrip()
-                    if line == "root-is-purelib: true":
-                        return True
-    return False
-
-
-def get_entrypoints(filename):
-    # type: (str) -> Tuple[Dict[str, str], Dict[str, str]]
-    if not os.path.exists(filename):
-        return {}, {}
-
-    # This is done because you can pass a string to entry_points wrappers which
-    # means that they may or may not be valid INI files. The attempt here is to
-    # strip leading and trailing whitespace in order to make them valid INI
-    # files.
-    with open(filename) as fp:
-        data = StringIO()
-        for line in fp:
-            data.write(line.strip())
-            data.write("\n")
-        data.seek(0)
-
-    # get the entry points and then the script names
-    entry_points = pkg_resources.EntryPoint.parse_map(data)
-    console = entry_points.get('console_scripts', {})
-    gui = entry_points.get('gui_scripts', {})
-
-    def _split_ep(s):
-        """get the string representation of EntryPoint, remove space and split
-        on '='"""
-        return str(s).replace(" ", "").split("=")
-
-    # convert the EntryPoint objects into strings with module:function
-    console = dict(_split_ep(v) for v in console.values())
-    gui = dict(_split_ep(v) for v in gui.values())
-    return console, gui
-
-
-def message_about_scripts_not_on_PATH(scripts):
-    # type: (Sequence[str]) -> Optional[str]
-    """Determine if any scripts are not on PATH and format a warning.
-
-    Returns a warning message if one or more scripts are not on PATH,
-    otherwise None.
-    """
-    if not scripts:
-        return None
-
-    # Group scripts by the path they were installed in
-    grouped_by_dir = collections.defaultdict(set)  # type: Dict[str, Set[str]]
-    for destfile in scripts:
-        parent_dir = os.path.dirname(destfile)
-        script_name = os.path.basename(destfile)
-        grouped_by_dir[parent_dir].add(script_name)
-
-    # We don't want to warn for directories that are on PATH.
-    not_warn_dirs = [
-        os.path.normcase(i).rstrip(os.sep) for i in
-        os.environ.get("PATH", "").split(os.pathsep)
-    ]
-    # If an executable sits with sys.executable, we don't warn for it.
-    #     This covers the case of venv invocations without activating the venv.
-    not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
-    warn_for = {
-        parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items()
-        if os.path.normcase(parent_dir) not in not_warn_dirs
-    }  # type: Dict[str, Set[str]]
-    if not warn_for:
-        return None
-
-    # Format a message
-    msg_lines = []
-    for parent_dir, dir_scripts in warn_for.items():
-        sorted_scripts = sorted(dir_scripts)  # type: List[str]
-        if len(sorted_scripts) == 1:
-            start_text = "script {} is".format(sorted_scripts[0])
-        else:
-            start_text = "scripts {} are".format(
-                ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
-            )
-
-        msg_lines.append(
-            "The {} installed in '{}' which is not on PATH."
-            .format(start_text, parent_dir)
-        )
-
-    last_line_fmt = (
-        "Consider adding {} to PATH or, if you prefer "
-        "to suppress this warning, use --no-warn-script-location."
-    )
-    if len(msg_lines) == 1:
-        msg_lines.append(last_line_fmt.format("this directory"))
-    else:
-        msg_lines.append(last_line_fmt.format("these directories"))
-
-    # Returns the formatted multiline message
-    return "\n".join(msg_lines)
-
-
-def sorted_outrows(outrows):
-    # type: (Iterable[InstalledCSVRow]) -> List[InstalledCSVRow]
-    """
-    Return the given rows of a RECORD file in sorted order.
-
-    Each row is a 3-tuple (path, hash, size) and corresponds to a record of
-    a RECORD file (see PEP 376 and PEP 427 for details).  For the rows
-    passed to this function, the size can be an integer (as an int or a
-    string), or the empty string.
-    """
-    # Normally, there should only be one row per path, in which case the
-    # second and third elements don't come into play when sorting.
-    # However, in cases in the wild where a path might happen to occur twice,
-    # we don't want the sort operation to trigger an error (but still want
-    # determinism).  Since the third element can be an int or string, we
-    # coerce each element to a string to avoid a TypeError in this case.
-    # For additional background, see--
-    # https://github.com/pypa/pip/issues/5868
-    return sorted(outrows, key=lambda row: tuple(str(x) for x in row))
-
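The str coercion is what keeps the sort from raising when sizes appear as both int and str; a tiny illustration with made-up rows:

    rows = [('pkg/b.py', 'sha256=yyy', 123),
            ('pkg/a.py', 'sha256=xxx', '99'),
            ('pkg/a.py', '', '')]
    # Comparing int with str would raise TypeError on Python 3; coercing every
    # element to str keeps the ordering deterministic instead.
    print(sorted(rows, key=lambda row: tuple(str(x) for x in row)))
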
-
-def get_csv_rows_for_installed(
-    old_csv_rows,  # type: Iterable[List[str]]
-    installed,  # type: Dict[str, str]
-    changed,  # type: set
-    generated,  # type: List[str]
-    lib_dir,  # type: str
-):
-    # type: (...) -> List[InstalledCSVRow]
-    """
-    :param installed: A map from archive RECORD path to installation RECORD
-        path.
-    """
-    installed_rows = []  # type: List[InstalledCSVRow]
-    for row in old_csv_rows:
-        if len(row) > 3:
-            logger.warning(
-                'RECORD line has more than three elements: {}'.format(row)
-            )
-        # Make a copy because we are mutating the row.
-        row = list(row)
-        old_path = row[0]
-        new_path = installed.pop(old_path, old_path)
-        row[0] = new_path
-        if new_path in changed:
-            digest, length = rehash(new_path)
-            row[1] = digest
-            row[2] = length
-        installed_rows.append(tuple(row))
-    for f in generated:
-        digest, length = rehash(f)
-        installed_rows.append((normpath(f, lib_dir), digest, str(length)))
-    for f in installed:
-        installed_rows.append((installed[f], '', ''))
-    return installed_rows
-
-
-class MissingCallableSuffix(Exception):
-    pass
-
-
-def _raise_for_invalid_entrypoint(specification):
-    entry = get_export_entry(specification)
-    if entry is not None and entry.suffix is None:
-        raise MissingCallableSuffix(str(entry))
-
-
-class PipScriptMaker(ScriptMaker):
-    def make(self, specification, options=None):
-        _raise_for_invalid_entrypoint(specification)
-        return super(PipScriptMaker, self).make(specification, options)
-
-
-def move_wheel_files(
-    name,  # type: str
-    req,  # type: Requirement
-    wheeldir,  # type: str
-    user=False,  # type: bool
-    home=None,  # type: Optional[str]
-    root=None,  # type: Optional[str]
-    pycompile=True,  # type: bool
-    scheme=None,  # type: Optional[Mapping[str, str]]
-    isolated=False,  # type: bool
-    prefix=None,  # type: Optional[str]
-    warn_script_location=True  # type: bool
-):
-    # type: (...) -> None
-    """Install a wheel"""
-    # TODO: Investigate and break this up.
-    # TODO: Look into moving this into a dedicated class for representing an
-    #       installation.
-
-    if not scheme:
-        scheme = distutils_scheme(
-            name, user=user, home=home, root=root, isolated=isolated,
-            prefix=prefix,
-        )
-
-    if root_is_purelib(name, wheeldir):
-        lib_dir = scheme['purelib']
-    else:
-        lib_dir = scheme['platlib']
-
-    info_dir = []  # type: List[str]
-    data_dirs = []
-    source = wheeldir.rstrip(os.path.sep) + os.path.sep
-
-    # Record details of the files moved
-    #   installed = files copied from the wheel to the destination
-    #   changed = files changed while installing (scripts #! line typically)
-    #   generated = files newly generated during the install (script wrappers)
-    installed = {}  # type: Dict[str, str]
-    changed = set()
-    generated = []  # type: List[str]
-
-    # Compile all of the pyc files that we're going to be installing
-    if pycompile:
-        with captured_stdout() as stdout:
-            with warnings.catch_warnings():
-                warnings.filterwarnings('ignore')
-                compileall.compile_dir(source, force=True, quiet=True)
-        logger.debug(stdout.getvalue())
-
-    def record_installed(srcfile, destfile, modified=False):
-        """Map archive RECORD paths to installation RECORD paths."""
-        oldpath = normpath(srcfile, wheeldir)
-        newpath = normpath(destfile, lib_dir)
-        installed[oldpath] = newpath
-        if modified:
-            changed.add(destfile)
-
-    def clobber(source, dest, is_base, fixer=None, filter=None):
-        ensure_dir(dest)  # common for the 'include' path
-
-        for dir, subdirs, files in os.walk(source):
-            basedir = dir[len(source):].lstrip(os.path.sep)
-            destdir = os.path.join(dest, basedir)
-            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
-                continue
-            for s in subdirs:
-                destsubdir = os.path.join(dest, basedir, s)
-                if is_base and basedir == '' and destsubdir.endswith('.data'):
-                    data_dirs.append(s)
-                    continue
-                elif (is_base and
-                        s.endswith('.dist-info') and
-                        canonicalize_name(s).startswith(
-                            canonicalize_name(req.name))):
-                    assert not info_dir, ('Multiple .dist-info directories: ' +
-                                          destsubdir + ', ' +
-                                          ', '.join(info_dir))
-                    info_dir.append(destsubdir)
-            for f in files:
-                # Skip unwanted files
-                if filter and filter(f):
-                    continue
-                srcfile = os.path.join(dir, f)
-                destfile = os.path.join(dest, basedir, f)
-                # directory creation is lazy and after the file filtering above
-                # to ensure we don't install empty dirs; empty dirs can't be
-                # uninstalled.
-                ensure_dir(destdir)
-
-                # copyfile (called below) truncates the destination if it
-                # exists and then writes the new contents. This is fine in most
-                # cases, but can cause a segfault if pip has loaded a shared
-                # object (e.g. from pyopenssl through its vendored urllib3)
-                # Since the shared object is mmap'd an attempt to call a
-                # symbol in it will then cause a segfault. Unlinking the file
-                # allows writing of new contents while allowing the process to
-                # continue to use the old copy.
-                if os.path.exists(destfile):
-                    os.unlink(destfile)
-
-                # We use copyfile (not move, copy, or copy2) to be extra sure
-                # that we are not moving directories over (copyfile fails for
-                # directories) as well as to ensure that we are not copying
-                # over any metadata because we want more control over what
-                # metadata we actually copy over.
-                shutil.copyfile(srcfile, destfile)
-
-                # Copy over the metadata for the file, currently this only
-                # includes the atime and mtime.
-                st = os.stat(srcfile)
-                if hasattr(os, "utime"):
-                    os.utime(destfile, (st.st_atime, st.st_mtime))
-
-                # If our file is executable, then make our destination file
-                # executable.
-                if os.access(srcfile, os.X_OK):
-                    st = os.stat(srcfile)
-                    permissions = (
-                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
-                    )
-                    os.chmod(destfile, permissions)
-
-                changed = False
-                if fixer:
-                    changed = fixer(destfile)
-                record_installed(srcfile, destfile, changed)
-
-    clobber(source, lib_dir, True)
-
-    assert info_dir, "%s .dist-info directory not found" % req
-
-    # Get the defined entry points
-    ep_file = os.path.join(info_dir[0], 'entry_points.txt')
-    console, gui = get_entrypoints(ep_file)
-
-    def is_entrypoint_wrapper(name):
-        # EP, EP.exe and EP-script.py are scripts generated for
-        # entry point EP by setuptools
-        if name.lower().endswith('.exe'):
-            matchname = name[:-4]
-        elif name.lower().endswith('-script.py'):
-            matchname = name[:-10]
-        elif name.lower().endswith(".pya"):
-            matchname = name[:-4]
-        else:
-            matchname = name
-        # Ignore setuptools-generated scripts
-        return (matchname in console or matchname in gui)
-
-    for datadir in data_dirs:
-        fixer = None
-        filter = None
-        for subdir in os.listdir(os.path.join(wheeldir, datadir)):
-            fixer = None
-            if subdir == 'scripts':
-                fixer = fix_script
-                filter = is_entrypoint_wrapper
-            source = os.path.join(wheeldir, datadir, subdir)
-            dest = scheme[subdir]
-            clobber(source, dest, False, fixer=fixer, filter=filter)
-
-    maker = PipScriptMaker(None, scheme['scripts'])
-
-    # Ensure old scripts are overwritten.
-    # See https://github.com/pypa/pip/issues/1800
-    maker.clobber = True
-
-    # Ensure we don't generate any variants for scripts because this is almost
-    # never what somebody wants.
-    # See https://bitbucket.org/pypa/distlib/issue/35/
-    maker.variants = {''}
-
-    # This is required because otherwise distlib creates scripts that are not
-    # executable.
-    # See https://bitbucket.org/pypa/distlib/issue/32/
-    maker.set_mode = True
-
-    scripts_to_generate = []
-
-    # Special case pip and setuptools to generate versioned wrappers
-    #
-    # The issue is that some projects (specifically, pip and setuptools) use
-    # code in setup.py to create "versioned" entry points - pip2.7 on Python
-    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
-    # the wheel metadata at build time, and so if the wheel is installed with
-    # a *different* version of Python the entry points will be wrong. The
-    # correct fix for this is to enhance the metadata to be able to describe
-    # such versioned entry points, but that won't happen till Metadata 2.0 is
-    # available.
-    # In the meantime, projects using versioned entry points will either have
-    # incorrect versioned entry points, or they will not be able to distribute
-    # "universal" wheels (i.e., they will need a wheel per Python version).
-    #
-    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
-    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
-    # override the versioned entry points in the wheel and generate the
-    # correct ones. This code is purely a short-term measure until Metadata 2.0
-    # is available.
-    #
-    # To add to the level of hack in this section of code: in order to support
-    # ensurepip, this code will look for an ``ENSUREPIP_OPTIONS`` environment
-    # variable which controls which versioned scripts get installed.
-    #
-    # ENSUREPIP_OPTIONS=altinstall
-    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
-    # ENSUREPIP_OPTIONS=install
-    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
-    #     that this option is technically if ENSUREPIP_OPTIONS is set and is
-    #     not altinstall
-    # DEFAULT
-    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
-    #     and easy_install-X.Y.
-    pip_script = console.pop('pip', None)
-    if pip_script:
-        if "ENSUREPIP_OPTIONS" not in os.environ:
-            scripts_to_generate.append('pip = ' + pip_script)
-
-        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
-            scripts_to_generate.append(
-                'pip%s = %s' % (sys.version_info[0], pip_script)
-            )
-
-        scripts_to_generate.append(
-            'pip%s = %s' % (get_major_minor_version(), pip_script)
-        )
-        # Delete any other versioned pip entry points
-        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
-        for k in pip_ep:
-            del console[k]
-    easy_install_script = console.pop('easy_install', None)
-    if easy_install_script:
-        if "ENSUREPIP_OPTIONS" not in os.environ:
-            scripts_to_generate.append(
-                'easy_install = ' + easy_install_script
-            )
-
-        scripts_to_generate.append(
-            'easy_install-%s = %s' % (
-                get_major_minor_version(), easy_install_script
-            )
-        )
-        # Delete any other versioned easy_install entry points
-        easy_install_ep = [
-            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
-        ]
-        for k in easy_install_ep:
-            del console[k]
-
-    # Generate the console and GUI entry points specified in the wheel
-    scripts_to_generate.extend(
-        '%s = %s' % kv for kv in console.items()
-    )
-
-    gui_scripts_to_generate = [
-        '%s = %s' % kv for kv in gui.items()
-    ]
-
-    generated_console_scripts = []  # type: List[str]
-
-    try:
-        generated_console_scripts = maker.make_multiple(scripts_to_generate)
-        generated.extend(generated_console_scripts)
-
-        generated.extend(
-            maker.make_multiple(gui_scripts_to_generate, {'gui': True})
-        )
-    except MissingCallableSuffix as e:
-        entry = e.args[0]
-        raise InstallationError(
-            "Invalid script entry point: {} for req: {} - A callable "
-            "suffix is required. Cf https://packaging.python.org/en/"
-            "latest/distributing.html#console-scripts for more "
-            "information.".format(entry, req)
-        )
-
-    if warn_script_location:
-        msg = message_about_scripts_not_on_PATH(generated_console_scripts)
-        if msg is not None:
-            logger.warning(msg)
-
-    # Record pip as the installer
-    installer = os.path.join(info_dir[0], 'INSTALLER')
-    temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip')
-    with open(temp_installer, 'wb') as installer_file:
-        installer_file.write(b'pip\n')
-    shutil.move(temp_installer, installer)
-    generated.append(installer)
-
-    # Record details of all files installed
-    record = os.path.join(info_dir[0], 'RECORD')
-    temp_record = os.path.join(info_dir[0], 'RECORD.pip')
-    with open_for_csv(record, 'r') as record_in:
-        with open_for_csv(temp_record, 'w+') as record_out:
-            reader = csv.reader(record_in)
-            outrows = get_csv_rows_for_installed(
-                reader, installed=installed, changed=changed,
-                generated=generated, lib_dir=lib_dir,
-            )
-            writer = csv.writer(record_out)
-            # Sort to simplify testing.
-            for row in sorted_outrows(outrows):
-                writer.writerow(row)
-    shutil.move(temp_record, record)
-
-
-def wheel_version(source_dir):
-    # type: (Optional[str]) -> Optional[Tuple[int, ...]]
-    """
-    Return the Wheel-Version of an extracted wheel, if possible.
-
-    Otherwise, return None if we couldn't parse / extract it.
-    """
-    try:
-        dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]
-
-        wheel_data = dist.get_metadata('WHEEL')
-        wheel_data = Parser().parsestr(wheel_data)
-
-        version = wheel_data['Wheel-Version'].strip()
-        version = tuple(map(int, version.split('.')))
-        return version
-    except Exception:
-        return None
-
-
-def check_compatibility(version, name):
-    # type: (Optional[Tuple[int, ...]], str) -> None
-    """
-    Raises errors or warns if called with an incompatible Wheel-Version.
-
-    Pip should refuse to install a Wheel-Version that's a major series
-    ahead of what it's compatible with (e.g. 2.0 > 1.1); and warn when
-    installing a version that is only a minor version ahead (e.g. 1.2 > 1.1).
-
-    version: a 2-tuple representing a Wheel-Version (Major, Minor)
-    name: name of wheel or package to raise exception about
-
-    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
-    """
-    if not version:
-        raise UnsupportedWheel(
-            "%s is in an unsupported or invalid wheel" % name
-        )
-    if version[0] > VERSION_COMPATIBLE[0]:
-        raise UnsupportedWheel(
-            "%s's Wheel-Version (%s) is not compatible with this version "
-            "of pip" % (name, '.'.join(map(str, version)))
-        )
-    elif version > VERSION_COMPATIBLE:
-        logger.warning(
-            'Installing from a newer Wheel-Version (%s)',
-            '.'.join(map(str, version)),
-        )
-
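The major/minor distinction above boils down to a tuple comparison against VERSION_COMPATIBLE; a quick sketch of the three outcomes:

    VERSION_COMPATIBLE = (1, 0)

    for version in [(1, 0), (1, 2), (2, 0)]:
        if version[0] > VERSION_COMPATIBLE[0]:
            print(version, '-> raise UnsupportedWheel')   # major version ahead
        elif version > VERSION_COMPATIBLE:
            print(version, '-> warn, but install')        # only a minor version ahead
        else:
            print(version, '-> fully compatible')
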
-
-def format_tag(file_tag):
-    # type: (Tuple[str, ...]) -> str
-    """
-    Format three tags in the form "<python_tag>-<abi_tag>-<platform_tag>".
-
-    :param file_tag: A 3-tuple of tags (python_tag, abi_tag, platform_tag).
-    """
-    return '-'.join(file_tag)
-
-
-class Wheel(object):
-    """A wheel file"""
-
-    # TODO: Maybe move the class into the models sub-package
-    # TODO: Maybe move the install code into this class
-
-    wheel_file_re = re.compile(
-        r"""^(?P(?P.+?)-(?P.*?))
-        ((-(?P\d[^-]*?))?-(?P.+?)-(?P.+?)-(?P.+?)
-        \.whl|\.dist-info)$""",
-        re.VERBOSE
-    )
-
-    def __init__(self, filename):
-        # type: (str) -> None
-        """
-        :raises InvalidWheelFilename: when the filename is invalid for a wheel
-        """
-        wheel_info = self.wheel_file_re.match(filename)
-        if not wheel_info:
-            raise InvalidWheelFilename(
-                "%s is not a valid wheel filename." % filename
-            )
-        self.filename = filename
-        self.name = wheel_info.group('name').replace('_', '-')
-        # we'll assume "_" means "-" due to wheel naming scheme
-        # (https://github.com/pypa/pip/issues/1150)
-        self.version = wheel_info.group('ver').replace('_', '-')
-        self.build_tag = wheel_info.group('build')
-        self.pyversions = wheel_info.group('pyver').split('.')
-        self.abis = wheel_info.group('abi').split('.')
-        self.plats = wheel_info.group('plat').split('.')
-
-        # All the tag combinations from this file
-        self.file_tags = {
-            (x, y, z) for x in self.pyversions
-            for y in self.abis for z in self.plats
-        }
-
-    def get_formatted_file_tags(self):
-        # type: () -> List[str]
-        """
-        Return the wheel's tags as a sorted list of strings.
-        """
-        return sorted(format_tag(tag) for tag in self.file_tags)
-
-    def support_index_min(self, tags):
-        # type: (List[Pep425Tag]) -> int
-        """
-        Return the lowest index that one of the wheel's file_tag combinations
-        achieves in the given list of supported tags.
-
-        For example, if there are 8 supported tags and one of the file tags
-        is first in the list, then return 0.
-
-        :param tags: the PEP 425 tags to check the wheel against, in order
-            with most preferred first.
-
-        :raises ValueError: If none of the wheel's file tags match one of
-            the supported tags.
-        """
-        return min(tags.index(tag) for tag in self.file_tags if tag in tags)
-
-    def supported(self, tags):
-        # type: (List[Pep425Tag]) -> bool
-        """
-        Return whether the wheel is compatible with one of the given tags.
-
-        :param tags: the PEP 425 tags to check the wheel against.
-        """
-        return not self.file_tags.isdisjoint(tags)
-
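Putting the filename regex and the tag expansion together, here is a standalone sketch that parses a typical wheel filename (the filename is just an example):

    import re

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
        ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE,
    )

    m = wheel_file_re.match('pip-19.2.3-py2.py3-none-any.whl')
    pyversions = m.group('pyver').split('.')   # ['py2', 'py3']
    abis = m.group('abi').split('.')           # ['none']
    plats = m.group('plat').split('.')         # ['any']
    file_tags = {(x, y, z) for x in pyversions for y in abis for z in plats}
    print(sorted('-'.join(tag) for tag in file_tags))
    # -> ['py2-none-any', 'py3-none-any']
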
-
-def _contains_egg_info(
-        s, _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)):
-    """Determine whether the string looks like an egg_info.
-
-    :param s: The string to parse. E.g. foo-2.1
-    """
-    return bool(_egg_info_re.search(s))
-
-
-def should_use_ephemeral_cache(
-    req,  # type: InstallRequirement
-    should_unpack,  # type: bool
-    cache_available,  # type: bool
-    check_binary_allowed,  # type: BinaryAllowedPredicate
-):
-    # type: (...) -> Optional[bool]
-    """
-    Return whether to build an InstallRequirement object using the
-    ephemeral cache.
-
-    :param cache_available: whether a cache directory is available for the
-        should_unpack=True case.
-
-    :return: True or False to build the requirement with ephem_cache=True
-        or False, respectively; or None not to build the requirement.
-    """
-    if req.constraint:
-        # never build requirements that are merely constraints
-        return None
-    if req.is_wheel:
-        if not should_unpack:
-            logger.info(
-                'Skipping %s, due to already being wheel.', req.name,
-            )
-        return None
-    if not should_unpack:
-        # i.e. pip wheel, not pip install;
-        # return False, knowing that the caller will never cache
-        # in this case anyway, so this return merely means "build it".
-        # TODO improve this behavior
-        return False
-
-    if req.editable or not req.source_dir:
-        return None
-
-    if not check_binary_allowed(req):
-        logger.info(
-            "Skipping wheel build for %s, due to binaries "
-            "being disabled for it.", req.name,
-        )
-        return None
-
-    if req.link and req.link.is_vcs:
-        # VCS checkout. Build wheel just for this run.
-        return True
-
-    link = req.link
-    base, ext = link.splitext()
-    if cache_available and _contains_egg_info(base):
-        return False
-
-    # Otherwise, build the wheel just for this run using the ephemeral
-    # cache since we are either in the case of e.g. a local directory, or
-    # no cache directory is available to use.
-    return True
-
-
-def format_command_result(
-    command_args,  # type: List[str]
-    command_output,  # type: str
-):
-    # type: (...) -> str
-    """
-    Format command information for logging.
-    """
-    command_desc = format_command_args(command_args)
-    text = 'Command arguments: {}\n'.format(command_desc)
-
-    if not command_output:
-        text += 'Command output: None'
-    elif logger.getEffectiveLevel() > logging.DEBUG:
-        text += 'Command output: [use --verbose to show]'
-    else:
-        if not command_output.endswith('\n'):
-            command_output += '\n'
-        text += 'Command output:\n{}{}'.format(command_output, LOG_DIVIDER)
-
-    return text
-
-
-def get_legacy_build_wheel_path(
-    names,  # type: List[str]
-    temp_dir,  # type: str
-    req,  # type: InstallRequirement
-    command_args,  # type: List[str]
-    command_output,  # type: str
-):
-    # type: (...) -> Optional[str]
-    """
-    Return the path to the wheel in the temporary build directory.
-    """
-    # Sort for determinism.
-    names = sorted(names)
-    if not names:
-        msg = (
-            'Legacy build of wheel for {!r} created no files.\n'
-        ).format(req.name)
-        msg += format_command_result(command_args, command_output)
-        logger.warning(msg)
-        return None
-
-    if len(names) > 1:
-        msg = (
-            'Legacy build of wheel for {!r} created more than one file.\n'
-            'Filenames (choosing first): {}\n'
-        ).format(req.name, names)
-        msg += format_command_result(command_args, command_output)
-        logger.warning(msg)
-
-    return os.path.join(temp_dir, names[0])
-
-
-def _always_true(_):
-    return True
-
-
-class WheelBuilder(object):
-    """Build wheels from a RequirementSet."""
-
-    def __init__(
-        self,
-        preparer,  # type: RequirementPreparer
-        wheel_cache,  # type: WheelCache
-        build_options=None,  # type: Optional[List[str]]
-        global_options=None,  # type: Optional[List[str]]
-        check_binary_allowed=None,  # type: Optional[BinaryAllowedPredicate]
-        no_clean=False  # type: bool
-    ):
-        # type: (...) -> None
-        if check_binary_allowed is None:
-            # Binaries allowed by default.
-            check_binary_allowed = _always_true
-
-        self.preparer = preparer
-        self.wheel_cache = wheel_cache
-
-        self._wheel_dir = preparer.wheel_download_dir
-
-        self.build_options = build_options or []
-        self.global_options = global_options or []
-        self.check_binary_allowed = check_binary_allowed
-        self.no_clean = no_clean
-
-    def _build_one(self, req, output_dir, python_tag=None):
-        """Build one wheel.
-
-        :return: The filename of the built wheel, or None if the build failed.
-        """
-        # Install build deps into temporary directory (PEP 518)
-        with req.build_env:
-            return self._build_one_inside_env(req, output_dir,
-                                              python_tag=python_tag)
-
-    def _build_one_inside_env(self, req, output_dir, python_tag=None):
-        with TempDirectory(kind="wheel") as temp_dir:
-            if req.use_pep517:
-                builder = self._build_one_pep517
-            else:
-                builder = self._build_one_legacy
-            wheel_path = builder(req, temp_dir.path, python_tag=python_tag)
-            if wheel_path is not None:
-                wheel_name = os.path.basename(wheel_path)
-                dest_path = os.path.join(output_dir, wheel_name)
-                try:
-                    wheel_hash, length = hash_file(wheel_path)
-                    shutil.move(wheel_path, dest_path)
-                    logger.info('Created wheel for %s: '
-                                'filename=%s size=%d sha256=%s',
-                                req.name, wheel_name, length,
-                                wheel_hash.hexdigest())
-                    logger.info('Stored in directory: %s', output_dir)
-                    return dest_path
-                except Exception:
-                    pass
-            # Ignore return, we can't do anything else useful.
-            self._clean_one(req)
-            return None
-
-    def _base_setup_args(self, req):
-        # NOTE: Eventually, we'd also want to add -S to the flags here, when we're
-        # isolating. Currently, it breaks Python in virtualenvs, because it
-        # relies on site.py to find parts of the standard library outside the
-        # virtualenv.
-        return make_setuptools_shim_args(
-            req.setup_py_path,
-            global_options=self.global_options,
-            unbuffered_output=True
-        )
-
-    def _build_one_pep517(self, req, tempd, python_tag=None):
-        """Build one InstallRequirement using the PEP 517 build process.
-
-        Returns path to wheel if successfully built. Otherwise, returns None.
-        """
-        assert req.metadata_directory is not None
-        if self.build_options:
-            # PEP 517 does not support --build-options
-            logger.error('Cannot build wheel for %s using PEP 517 when '
-                         '--build-options is present' % (req.name,))
-            return None
-        try:
-            logger.debug('Destination directory: %s', tempd)
-
-            runner = runner_with_spinner_message(
-                'Building wheel for {} (PEP 517)'.format(req.name)
-            )
-            backend = req.pep517_backend
-            with backend.subprocess_runner(runner):
-                wheel_name = backend.build_wheel(
-                    tempd,
-                    metadata_directory=req.metadata_directory,
-                )
-            if python_tag:
-                # General PEP 517 backends don't necessarily support
-                # a "--python-tag" option, so we rename the wheel
-                # file directly.
-                new_name = replace_python_tag(wheel_name, python_tag)
-                os.rename(
-                    os.path.join(tempd, wheel_name),
-                    os.path.join(tempd, new_name)
-                )
-                # Reassign to simplify the return at the end of function
-                wheel_name = new_name
-        except Exception:
-            logger.error('Failed building wheel for %s', req.name)
-            return None
-        return os.path.join(tempd, wheel_name)
-
-    def _build_one_legacy(self, req, tempd, python_tag=None):
-        """Build one InstallRequirement using the "legacy" build process.
-
-        Returns path to wheel if successfully built. Otherwise, returns None.
-        """
-        base_args = self._base_setup_args(req)
-
-        spin_message = 'Building wheel for %s (setup.py)' % (req.name,)
-        with open_spinner(spin_message) as spinner:
-            logger.debug('Destination directory: %s', tempd)
-            wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
-                + self.build_options
-
-            if python_tag is not None:
-                wheel_args += ["--python-tag", python_tag]
-
-            try:
-                output = call_subprocess(
-                    wheel_args,
-                    cwd=req.unpacked_source_directory,
-                    spinner=spinner,
-                )
-            except Exception:
-                spinner.finish("error")
-                logger.error('Failed building wheel for %s', req.name)
-                return None
-
-            names = os.listdir(tempd)
-            wheel_path = get_legacy_build_wheel_path(
-                names=names,
-                temp_dir=tempd,
-                req=req,
-                command_args=wheel_args,
-                command_output=output,
-            )
-            return wheel_path
-
-    def _clean_one(self, req):
-        base_args = self._base_setup_args(req)
-
-        logger.info('Running setup.py clean for %s', req.name)
-        clean_args = base_args + ['clean', '--all']
-        try:
-            call_subprocess(clean_args, cwd=req.source_dir)
-            return True
-        except Exception:
-            logger.error('Failed cleaning build dir for %s', req.name)
-            return False
-
-    def build(
-        self,
-        requirements,  # type: Iterable[InstallRequirement]
-        should_unpack=False  # type: bool
-    ):
-        # type: (...) -> List[InstallRequirement]
-        """Build wheels.
-
-        :param should_unpack: If True, after building the wheel, unpack it
-            and replace the sdist with the unpacked version in preparation
-            for installation.
-        :return: The list of InstallRequirement objects that failed to build.
-        """
-        # pip install uses should_unpack=True.
-        # pip install never provides a _wheel_dir.
-        # pip wheel uses should_unpack=False.
-        # pip wheel always provides a _wheel_dir (via the preparer).
-        assert (
-            (should_unpack and not self._wheel_dir) or
-            (not should_unpack and self._wheel_dir)
-        )
-
-        buildset = []
-        cache_available = bool(self.wheel_cache.cache_dir)
-
-        for req in requirements:
-            ephem_cache = should_use_ephemeral_cache(
-                req,
-                should_unpack=should_unpack,
-                cache_available=cache_available,
-                check_binary_allowed=self.check_binary_allowed,
-            )
-            if ephem_cache is None:
-                continue
-
-            # Determine where the wheel should go.
-            if should_unpack:
-                if ephem_cache:
-                    output_dir = self.wheel_cache.get_ephem_path_for_link(
-                        req.link
-                    )
-                else:
-                    output_dir = self.wheel_cache.get_path_for_link(req.link)
-            else:
-                output_dir = self._wheel_dir
-
-            buildset.append((req, output_dir))
-
-        if not buildset:
-            return []
-
-        # TODO by @pradyunsg
-        # Should break up this method into 2 separate methods.
-
-        # Build the wheels.
-        logger.info(
-            'Building wheels for collected packages: %s',
-            ', '.join([req.name for (req, _) in buildset]),
-        )
-
-        python_tag = None
-        if should_unpack:
-            python_tag = pep425tags.implementation_tag
-
-        with indent_log():
-            build_success, build_failure = [], []
-            for req, output_dir in buildset:
-                try:
-                    ensure_dir(output_dir)
-                except OSError as e:
-                    logger.warning(
-                        "Building wheel for %s failed: %s",
-                        req.name, e,
-                    )
-                    build_failure.append(req)
-                    continue
-
-                wheel_file = self._build_one(
-                    req, output_dir,
-                    python_tag=python_tag,
-                )
-                if wheel_file:
-                    build_success.append(req)
-                    if should_unpack:
-                        # XXX: This is mildly duplicative with prepare_files,
-                        # but not close enough to pull out to a single common
-                        # method.
-                        # The code below assumes temporary source dirs -
-                        # prevent it doing bad things.
-                        if (
-                            req.source_dir and
-                            not has_delete_marker_file(req.source_dir)
-                        ):
-                            raise AssertionError(
-                                "bad source dir - missing marker")
-                        # Delete the source we built the wheel from
-                        req.remove_temporary_source()
-                        # set the build directory again - name is known from
-                        # the work prepare_files did.
-                        req.source_dir = req.ensure_build_location(
-                            self.preparer.build_dir
-                        )
-                        # Update the link for this.
-                        req.link = Link(path_to_url(wheel_file))
-                        assert req.link.is_wheel
-                        # extract the wheel into the dir
-                        unpack_file(req.link.file_path, req.source_dir)
-                else:
-                    build_failure.append(req)
-
-        # notify success/failure
-        if build_success:
-            logger.info(
-                'Successfully built %s',
-                ' '.join([req.name for req in build_success]),
-            )
-        if build_failure:
-            logger.info(
-                'Failed to build %s',
-                ' '.join([req.name for req in build_failure]),
-            )
-        # Return a list of requirements that failed to build
-        return build_failure
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/__init__.py
deleted file mode 100644
index a0fcb8e..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/__init__.py
+++ /dev/null
@@ -1,109 +0,0 @@
-"""
-pip._vendor is for vendoring dependencies of pip to prevent needing pip to
-depend on something external.
-
-Files inside of pip._vendor should be considered immutable and should only be
-updated to versions from upstream.
-"""
-from __future__ import absolute_import
-
-import glob
-import os.path
-import sys
-
-# Downstream redistributors which have debundled our dependencies should also
-# patch this value to be true. This will trigger the additional patching
-# to cause things like "six" to be available as pip._vendor.six.
-DEBUNDLED = False
-
-# By default, look in this directory for a bunch of .whl files which we will
-# add to the beginning of sys.path before attempting to import anything. This
-# is done to support downstream re-distributors like Debian and Fedora who
-# wish to create their own Wheels for our dependencies to aid in debundling.
-WHEEL_DIR = os.path.abspath(os.path.dirname(__file__))
-
-
-# Define a small helper function to alias our vendored modules to the real ones
-# if the vendored ones do not exist. The idea for this was taken from
-# https://github.com/kennethreitz/requests/pull/2567.
-def vendored(modulename):
-    vendored_name = "{0}.{1}".format(__name__, modulename)
-
-    try:
-        __import__(modulename, globals(), locals(), level=0)
-    except ImportError:
-        # We can just silently allow import failures to pass here. If we
-        # got to this point it means that ``import pip._vendor.whatever``
-        # failed and so did ``import whatever``. Since we're importing this
-        # upfront in an attempt to alias imports, not erroring here will
-        # just mean we get a regular import error whenever pip *actually*
-        # tries to import one of these modules to use it, which actually
-        # gives us a better error message than we would have otherwise
-        # gotten.
-        pass
-    else:
-        sys.modules[vendored_name] = sys.modules[modulename]
-        base, head = vendored_name.rsplit(".", 1)
-        setattr(sys.modules[base], head, sys.modules[modulename])
-
-
-# If we're operating in a debundled setup, then we want to go ahead and trigger
-# the aliasing of our vendored libraries as well as looking for wheels to add
-# to our sys.path. This will cause all of this code to be a no-op typically
-# however downstream redistributors can enable it in a consistent way across
-# all platforms.
-if DEBUNDLED:
-    # Actually look inside of WHEEL_DIR to find .whl files and add them to the
-    # front of our sys.path.
-    sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path
-
-    # Actually alias all of our vendored dependencies.
-    vendored("cachecontrol")
-    vendored("colorama")
-    vendored("contextlib2")
-    vendored("distlib")
-    vendored("distro")
-    vendored("html5lib")
-    vendored("six")
-    vendored("six.moves")
-    vendored("six.moves.urllib")
-    vendored("six.moves.urllib.parse")
-    vendored("packaging")
-    vendored("packaging.version")
-    vendored("packaging.specifiers")
-    vendored("pep517")
-    vendored("pkg_resources")
-    vendored("progress")
-    vendored("pytoml")
-    vendored("retrying")
-    vendored("requests")
-    vendored("requests.exceptions")
-    vendored("requests.packages")
-    vendored("requests.packages.urllib3")
-    vendored("requests.packages.urllib3._collections")
-    vendored("requests.packages.urllib3.connection")
-    vendored("requests.packages.urllib3.connectionpool")
-    vendored("requests.packages.urllib3.contrib")
-    vendored("requests.packages.urllib3.contrib.ntlmpool")
-    vendored("requests.packages.urllib3.contrib.pyopenssl")
-    vendored("requests.packages.urllib3.exceptions")
-    vendored("requests.packages.urllib3.fields")
-    vendored("requests.packages.urllib3.filepost")
-    vendored("requests.packages.urllib3.packages")
-    vendored("requests.packages.urllib3.packages.ordered_dict")
-    vendored("requests.packages.urllib3.packages.six")
-    vendored("requests.packages.urllib3.packages.ssl_match_hostname")
-    vendored("requests.packages.urllib3.packages.ssl_match_hostname."
-             "_implementation")
-    vendored("requests.packages.urllib3.poolmanager")
-    vendored("requests.packages.urllib3.request")
-    vendored("requests.packages.urllib3.response")
-    vendored("requests.packages.urllib3.util")
-    vendored("requests.packages.urllib3.util.connection")
-    vendored("requests.packages.urllib3.util.request")
-    vendored("requests.packages.urllib3.util.response")
-    vendored("requests.packages.urllib3.util.retry")
-    vendored("requests.packages.urllib3.util.ssl_")
-    vendored("requests.packages.urllib3.util.timeout")
-    vendored("requests.packages.urllib3.util.url")
-    vendored("urllib3")
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/appdirs.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/appdirs.py
deleted file mode 100644
index 2bd3911..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/appdirs.py
+++ /dev/null
@@ -1,604 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright (c) 2005-2010 ActiveState Software Inc.
-# Copyright (c) 2013 Eddy Petrișor
-
-"""Utilities for determining application-specific dirs.
-
-See <http://github.com/ActiveState/appdirs> for details and usage.
-"""
-# Dev Notes:
-# - MSDN on where to store app data files:
-#   http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
-# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
-# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
-
-__version_info__ = (1, 4, 3)
-__version__ = '.'.join(map(str, __version_info__))
-
-
-import sys
-import os
-
-PY3 = sys.version_info[0] == 3
-
-if PY3:
-    unicode = str
-
-if sys.platform.startswith('java'):
-    import platform
-    os_name = platform.java_ver()[3][0]
-    if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc.
-        system = 'win32'
-    elif os_name.startswith('Mac'): # "Mac OS X", etc.
-        system = 'darwin'
-    else: # "Linux", "SunOS", "FreeBSD", etc.
-        # Setting this to "linux2" is not ideal, but only Windows or Mac
-        # are actually checked for and the rest of the module expects
-        # *sys.platform* style strings.
-        system = 'linux2'
-else:
-    system = sys.platform
-
-
-
-def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
-    r"""Return full path to the user-specific data dir for this application.
-
-        "appname" is the name of application.
-            If None, just the system directory is returned.
-        "appauthor" (only used on Windows) is the name of the
-            appauthor or distributing body for this application. Typically
-            it is the owning company name. This falls back to appname. You may
-            pass False to disable it.
-        "version" is an optional version path element to append to the
-            path. You might want to use this if you want multiple versions
-            of your app to be able to run independently. If used, this
-            would typically be "<major>.<minor>".
-            Only applied when appname is present.
-        "roaming" (boolean, default False) can be set True to use the Windows
-            roaming appdata directory. That means that for users on a Windows
-            network setup for roaming profiles, this user data will be
-            sync'd on login. See
-            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
-            for a discussion of issues.
-
-    Typical user data directories are:
-        Mac OS X:               ~/Library/Application Support/<AppName>
-        Unix:                   ~/.local/share/<AppName>    # or in $XDG_DATA_HOME, if defined
-        Win XP (not roaming):   C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
-        Win XP (roaming):       C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
-        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
-        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>
-
-    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
-    That means, by default "~/.local/share/<AppName>".
-    """
-    if system == "win32":
-        if appauthor is None:
-            appauthor = appname
-        const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
-        path = os.path.normpath(_get_win_folder(const))
-        if appname:
-            if appauthor is not False:
-                path = os.path.join(path, appauthor, appname)
-            else:
-                path = os.path.join(path, appname)
-    elif system == 'darwin':
-        path = os.path.expanduser('~/Library/Application Support/')
-        if appname:
-            path = os.path.join(path, appname)
-    else:
-        path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
-        if appname:
-            path = os.path.join(path, appname)
-    if appname and version:
-        path = os.path.join(path, version)
-    return path
-
-
-def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
-    r"""Return full path to the user-shared data dir for this application.
-
-        "appname" is the name of application.
-            If None, just the system directory is returned.
-        "appauthor" (only used on Windows) is the name of the
-            appauthor or distributing body for this application. Typically
-            it is the owning company name. This falls back to appname. You may
-            pass False to disable it.
-        "version" is an optional version path element to append to the
-            path. You might want to use this if you want multiple versions
-            of your app to be able to run independently. If used, this
-            would typically be "<major>.<minor>".
-            Only applied when appname is present.
-        "multipath" is an optional parameter only applicable to *nix
-            which indicates that the entire list of data dirs should be
-            returned. By default, the first item from XDG_DATA_DIRS is
-            returned, or '/usr/local/share/<AppName>',
-            if XDG_DATA_DIRS is not set
-
-    Typical site data directories are:
-        Mac OS X:   /Library/Application Support/<AppName>
-        Unix:       /usr/local/share/<AppName> or /usr/share/<AppName>
-        Win XP:     C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
-        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
-        Win 7:      C:\ProgramData\<AppAuthor>\<AppName>   # Hidden, but writeable on Win 7.
-
-    For Unix, this is using the $XDG_DATA_DIRS[0] default.
-
-    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
-    """
-    if system == "win32":
-        if appauthor is None:
-            appauthor = appname
-        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
-        if appname:
-            if appauthor is not False:
-                path = os.path.join(path, appauthor, appname)
-            else:
-                path = os.path.join(path, appname)
-    elif system == 'darwin':
-        path = os.path.expanduser('/Library/Application Support')
-        if appname:
-            path = os.path.join(path, appname)
-    else:
-        # XDG default for $XDG_DATA_DIRS
-        # only first, if multipath is False
-        path = os.getenv('XDG_DATA_DIRS',
-                         os.pathsep.join(['/usr/local/share', '/usr/share']))
-        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
-        if appname:
-            if version:
-                appname = os.path.join(appname, version)
-            pathlist = [os.sep.join([x, appname]) for x in pathlist]
-
-        if multipath:
-            path = os.pathsep.join(pathlist)
-        else:
-            path = pathlist[0]
-        return path
-
-    if appname and version:
-        path = os.path.join(path, version)
-    return path
-
-
-def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
-    r"""Return full path to the user-specific config dir for this application.
-
-        "appname" is the name of application.
-            If None, just the system directory is returned.
-        "appauthor" (only used on Windows) is the name of the
-            appauthor or distributing body for this application. Typically
-            it is the owning company name. This falls back to appname. You may
-            pass False to disable it.
-        "version" is an optional version path element to append to the
-            path. You might want to use this if you want multiple versions
-            of your app to be able to run independently. If used, this
-            would typically be "<major>.<minor>".
-            Only applied when appname is present.
-        "roaming" (boolean, default False) can be set True to use the Windows
-            roaming appdata directory. That means that for users on a Windows
-            network setup for roaming profiles, this user data will be
-            sync'd on login. See
-            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
-            for a discussion of issues.
-
-    Typical user config directories are:
-        Mac OS X:               same as user_data_dir
-        Unix:                   ~/.config/<AppName>     # or in $XDG_CONFIG_HOME, if defined
-        Win *:                  same as user_data_dir
-
-    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
-    That means, by default "~/.config/<AppName>".
-    """
-    if system in ["win32", "darwin"]:
-        path = user_data_dir(appname, appauthor, None, roaming)
-    else:
-        path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
-        if appname:
-            path = os.path.join(path, appname)
-    if appname and version:
-        path = os.path.join(path, version)
-    return path
-
-
-def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
-    r"""Return full path to the user-shared config dir for this application.
-
-        "appname" is the name of application.
-            If None, just the system directory is returned.
-        "appauthor" (only used on Windows) is the name of the
-            appauthor or distributing body for this application. Typically
-            it is the owning company name. This falls back to appname. You may
-            pass False to disable it.
-        "version" is an optional version path element to append to the
-            path. You might want to use this if you want multiple versions
-            of your app to be able to run independently. If used, this
-            would typically be "<major>.<minor>".
-            Only applied when appname is present.
-        "multipath" is an optional parameter only applicable to *nix
-            which indicates that the entire list of config dirs should be
-            returned. By default, the first item from XDG_CONFIG_DIRS is
-            returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set
-
-    Typical site config directories are:
-        Mac OS X:   same as site_data_dir
-        Unix:       /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
-                    $XDG_CONFIG_DIRS
-        Win *:      same as site_data_dir
-        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
-
-    For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False
-
-    WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
-    """
-    if system in ["win32", "darwin"]:
-        path = site_data_dir(appname, appauthor)
-        if appname and version:
-            path = os.path.join(path, version)
-    else:
-        # XDG default for $XDG_CONFIG_DIRS
-        # only first, if multipath is False
-        path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
-        pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
-        if appname:
-            if version:
-                appname = os.path.join(appname, version)
-            pathlist = [os.sep.join([x, appname]) for x in pathlist]
-
-        if multipath:
-            path = os.pathsep.join(pathlist)
-        else:
-            path = pathlist[0]
-    return path
-
-
-def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
-    r"""Return full path to the user-specific cache dir for this application.
-
-        "appname" is the name of application.
-            If None, just the system directory is returned.
-        "appauthor" (only used on Windows) is the name of the
-            appauthor or distributing body for this application. Typically
-            it is the owning company name. This falls back to appname. You may
-            pass False to disable it.
-        "version" is an optional version path element to append to the
-            path. You might want to use this if you want multiple versions
-            of your app to be able to run independently. If used, this
-            would typically be "<major>.<minor>".
-            Only applied when appname is present.
-        "opinion" (boolean) can be False to disable the appending of
-            "Cache" to the base app data dir for Windows. See
-            discussion below.
-
-    Typical user cache directories are:
-        Mac OS X:   ~/Library/Caches/<AppName>
-        Unix:       ~/.cache/<AppName> (XDG default)
-        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
-        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache
-
-    On Windows the only suggestion in the MSDN docs is that local settings go in
-    the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
-    app data dir (the default returned by `user_data_dir` above). Apps typically
-    put cache data somewhere *under* the given dir here. Some examples:
-        ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
-        ...\Acme\SuperApp\Cache\1.0
-    OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
-    This can be disabled with the `opinion=False` option.
-    """
-    if system == "win32":
-        if appauthor is None:
-            appauthor = appname
-        path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
-        if appname:
-            if appauthor is not False:
-                path = os.path.join(path, appauthor, appname)
-            else:
-                path = os.path.join(path, appname)
-            if opinion:
-                path = os.path.join(path, "Cache")
-    elif system == 'darwin':
-        path = os.path.expanduser('~/Library/Caches')
-        if appname:
-            path = os.path.join(path, appname)
-    else:
-        path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
-        if appname:
-            path = os.path.join(path, appname)
-    if appname and version:
-        path = os.path.join(path, version)
-    return path
-
-
-def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
-    r"""Return full path to the user-specific state dir for this application.
-
-        "appname" is the name of application.
-            If None, just the system directory is returned.
-        "appauthor" (only used on Windows) is the name of the
-            appauthor or distributing body for this application. Typically
-            it is the owning company name. This falls back to appname. You may
-            pass False to disable it.
-        "version" is an optional version path element to append to the
-            path. You might want to use this if you want multiple versions
-            of your app to be able to run independently. If used, this
-            would typically be "<major>.<minor>".
-            Only applied when appname is present.
-        "roaming" (boolean, default False) can be set True to use the Windows
-            roaming appdata directory. That means that for users on a Windows
-            network setup for roaming profiles, this user data will be
-            sync'd on login. See
-            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
-            for a discussion of issues.
-
-    Typical user state directories are:
-        Mac OS X:  same as user_data_dir
-        Unix:      ~/.local/state/<AppName>   # or in $XDG_STATE_HOME, if defined
-        Win *:     same as user_data_dir
-
-    For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
-    to extend the XDG spec and support $XDG_STATE_HOME.
-
-    That means, by default "~/.local/state/<AppName>".
-    """
-    if system in ["win32", "darwin"]:
-        path = user_data_dir(appname, appauthor, None, roaming)
-    else:
-        path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
-        if appname:
-            path = os.path.join(path, appname)
-    if appname and version:
-        path = os.path.join(path, version)
-    return path
-
-
-def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
-    r"""Return full path to the user-specific log dir for this application.
-
-        "appname" is the name of application.
-            If None, just the system directory is returned.
-        "appauthor" (only used on Windows) is the name of the
-            appauthor or distributing body for this application. Typically
-            it is the owning company name. This falls back to appname. You may
-            pass False to disable it.
-        "version" is an optional version path element to append to the
-            path. You might want to use this if you want multiple versions
-            of your app to be able to run independently. If used, this
-            would typically be "<major>.<minor>".
-            Only applied when appname is present.
-        "opinion" (boolean) can be False to disable the appending of
-            "Logs" to the base app data dir for Windows, and "log" to the
-            base cache dir for Unix. See discussion below.
-
-    Typical user log directories are:
-        Mac OS X:   ~/Library/Logs/<AppName>
-        Unix:       ~/.cache/<AppName>/log  # or under $XDG_CACHE_HOME if defined
-        Win XP:     C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
-        Vista:      C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs
-
-    On Windows the only suggestion in the MSDN docs is that local settings
-    go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
-    examples of what some windows apps use for a logs dir.)
-
-    OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
-    value for Windows and appends "log" to the user cache dir for Unix.
-    This can be disabled with the `opinion=False` option.
-    """
-    if system == "darwin":
-        path = os.path.join(
-            os.path.expanduser('~/Library/Logs'),
-            appname)
-    elif system == "win32":
-        path = user_data_dir(appname, appauthor, version)
-        version = False
-        if opinion:
-            path = os.path.join(path, "Logs")
-    else:
-        path = user_cache_dir(appname, appauthor, version)
-        version = False
-        if opinion:
-            path = os.path.join(path, "log")
-    if appname and version:
-        path = os.path.join(path, version)
-    return path
-
-
-class AppDirs(object):
-    """Convenience wrapper for getting application dirs."""
-    def __init__(self, appname=None, appauthor=None, version=None,
-            roaming=False, multipath=False):
-        self.appname = appname
-        self.appauthor = appauthor
-        self.version = version
-        self.roaming = roaming
-        self.multipath = multipath
-
-    @property
-    def user_data_dir(self):
-        return user_data_dir(self.appname, self.appauthor,
-                             version=self.version, roaming=self.roaming)
-
-    @property
-    def site_data_dir(self):
-        return site_data_dir(self.appname, self.appauthor,
-                             version=self.version, multipath=self.multipath)
-
-    @property
-    def user_config_dir(self):
-        return user_config_dir(self.appname, self.appauthor,
-                               version=self.version, roaming=self.roaming)
-
-    @property
-    def site_config_dir(self):
-        return site_config_dir(self.appname, self.appauthor,
-                             version=self.version, multipath=self.multipath)
-
-    @property
-    def user_cache_dir(self):
-        return user_cache_dir(self.appname, self.appauthor,
-                              version=self.version)
-
-    @property
-    def user_state_dir(self):
-        return user_state_dir(self.appname, self.appauthor,
-                              version=self.version)
-
-    @property
-    def user_log_dir(self):
-        return user_log_dir(self.appname, self.appauthor,
-                            version=self.version)
-
-
-#---- internal support stuff
-
-def _get_win_folder_from_registry(csidl_name):
-    """This is a fallback technique at best. I'm not sure if using the
-    registry for this guarantees us the correct answer for all CSIDL_*
-    names.
-    """
-    if PY3:
-      import winreg as _winreg
-    else:
-      import _winreg
-
-    shell_folder_name = {
-        "CSIDL_APPDATA": "AppData",
-        "CSIDL_COMMON_APPDATA": "Common AppData",
-        "CSIDL_LOCAL_APPDATA": "Local AppData",
-    }[csidl_name]
-
-    key = _winreg.OpenKey(
-        _winreg.HKEY_CURRENT_USER,
-        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
-    )
-    dir, type = _winreg.QueryValueEx(key, shell_folder_name)
-    return dir
-
-
-def _get_win_folder_with_pywin32(csidl_name):
-    from win32com.shell import shellcon, shell
-    dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
-    # Try to make this a unicode path because SHGetFolderPath does
-    # not return unicode strings when there is unicode data in the
-    # path.
-    try:
-        dir = unicode(dir)
-
-        # Downgrade to short path name if have highbit chars. See
-        # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
-        has_high_char = False
-        for c in dir:
-            if ord(c) > 255:
-                has_high_char = True
-                break
-        if has_high_char:
-            try:
-                import win32api
-                dir = win32api.GetShortPathName(dir)
-            except ImportError:
-                pass
-    except UnicodeError:
-        pass
-    return dir
-
-
-def _get_win_folder_with_ctypes(csidl_name):
-    import ctypes
-
-    csidl_const = {
-        "CSIDL_APPDATA": 26,
-        "CSIDL_COMMON_APPDATA": 35,
-        "CSIDL_LOCAL_APPDATA": 28,
-    }[csidl_name]
-
-    buf = ctypes.create_unicode_buffer(1024)
-    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
-
-    # Downgrade to short path name if have highbit chars. See
-    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
-    has_high_char = False
-    for c in buf:
-        if ord(c) > 255:
-            has_high_char = True
-            break
-    if has_high_char:
-        buf2 = ctypes.create_unicode_buffer(1024)
-        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
-            buf = buf2
-
-    return buf.value
-
-def _get_win_folder_with_jna(csidl_name):
-    import array
-    from com.sun import jna
-    from com.sun.jna.platform import win32
-
-    buf_size = win32.WinDef.MAX_PATH * 2
-    buf = array.zeros('c', buf_size)
-    shell = win32.Shell32.INSTANCE
-    shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
-    dir = jna.Native.toString(buf.tostring()).rstrip("\0")
-
-    # Downgrade to short path name if have highbit chars. See
-    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
-    has_high_char = False
-    for c in dir:
-        if ord(c) > 255:
-            has_high_char = True
-            break
-    if has_high_char:
-        buf = array.zeros('c', buf_size)
-        kernel = win32.Kernel32.INSTANCE
-        if kernel.GetShortPathName(dir, buf, buf_size):
-            dir = jna.Native.toString(buf.tostring()).rstrip("\0")
-
-    return dir
-
-if system == "win32":
-    try:
-        from ctypes import windll
-        _get_win_folder = _get_win_folder_with_ctypes
-    except ImportError:
-        try:
-            import com.sun.jna
-            _get_win_folder = _get_win_folder_with_jna
-        except ImportError:
-            _get_win_folder = _get_win_folder_from_registry
-
-
-#---- self test code
-
-if __name__ == "__main__":
-    appname = "MyApp"
-    appauthor = "MyCompany"
-
-    props = ("user_data_dir",
-             "user_config_dir",
-             "user_cache_dir",
-             "user_state_dir",
-             "user_log_dir",
-             "site_data_dir",
-             "site_config_dir")
-
-    print("-- app dirs %s --" % __version__)
-
-    print("-- app dirs (with optional 'version')")
-    dirs = AppDirs(appname, appauthor, version="1.0")
-    for prop in props:
-        print("%s: %s" % (prop, getattr(dirs, prop)))
-
-    print("\n-- app dirs (without optional 'version')")
-    dirs = AppDirs(appname, appauthor)
-    for prop in props:
-        print("%s: %s" % (prop, getattr(dirs, prop)))
-
-    print("\n-- app dirs (without optional 'appauthor')")
-    dirs = AppDirs(appname)
-    for prop in props:
-        print("%s: %s" % (prop, getattr(dirs, prop)))
-
-    print("\n-- app dirs (with disabled 'appauthor')")
-    dirs = AppDirs(appname, appauthor=False)
-    for prop in props:
-        print("%s: %s" % (prop, getattr(dirs, prop)))
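
The self-test block above already exercises AppDirs; for reference, here is a short hedged usage sketch of the same API. The app and author names are arbitrary, and the printed paths are illustrative only, since the results depend on the platform and on XDG_* environment variables.

from pip._vendor.appdirs import AppDirs, user_cache_dir

dirs = AppDirs("MyApp", "MyCompany", version="1.0")
print(dirs.user_data_dir)    # e.g. ~/.local/share/MyApp/1.0 on Linux
print(dirs.user_config_dir)  # e.g. ~/.config/MyApp/1.0
print(user_cache_dir("MyApp", "MyCompany"))  # e.g. ~/.cache/MyApp
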
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__init__.py
deleted file mode 100644
index 8fdee66..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/__init__.py
+++ /dev/null
@@ -1,11 +0,0 @@
-"""CacheControl import Interface.
-
-Make it easy to import from cachecontrol without long namespaces.
-"""
-__author__ = "Eric Larson"
-__email__ = "eric@ionrock.org"
-__version__ = "0.12.5"
-
-from .wrapper import CacheControl
-from .adapter import CacheControlAdapter
-from .controller import CacheController
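
A hedged usage sketch for the package interface above: wrapping a requests session so cacheable GET responses are stored in the default in-memory cache. The CacheControl wrapper itself lives in the .wrapper module, which is not part of this hunk; its call signature here follows the upstream CacheControl documentation and should be treated as an assumption.

from pip._vendor import requests
from pip._vendor.cachecontrol import CacheControl

# Wrap a plain session; the wrapper mounts a caching adapter for http/https.
sess = CacheControl(requests.Session())
resp = sess.get("https://pypi.org/simple/")  # first call hits the network
resp = sess.get("https://pypi.org/simple/")  # a repeat may be served from cache
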
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/_cmd.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/_cmd.py
deleted file mode 100644
index f1e0ad9..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/_cmd.py
+++ /dev/null
@@ -1,57 +0,0 @@
-import logging
-
-from pip._vendor import requests
-
-from pip._vendor.cachecontrol.adapter import CacheControlAdapter
-from pip._vendor.cachecontrol.cache import DictCache
-from pip._vendor.cachecontrol.controller import logger
-
-from argparse import ArgumentParser
-
-
-def setup_logging():
-    logger.setLevel(logging.DEBUG)
-    handler = logging.StreamHandler()
-    logger.addHandler(handler)
-
-
-def get_session():
-    adapter = CacheControlAdapter(
-        DictCache(), cache_etags=True, serializer=None, heuristic=None
-    )
-    sess = requests.Session()
-    sess.mount("http://", adapter)
-    sess.mount("https://", adapter)
-
-    sess.cache_controller = adapter.controller
-    return sess
-
-
-def get_args():
-    parser = ArgumentParser()
-    parser.add_argument("url", help="The URL to try and cache")
-    return parser.parse_args()
-
-
-def main(args=None):
-    args = get_args()
-    sess = get_session()
-
-    # Make a request to get a response
-    resp = sess.get(args.url)
-
-    # Turn on logging
-    setup_logging()
-
-    # try setting the cache
-    sess.cache_controller.cache_response(resp.request, resp.raw)
-
-    # Now try to get it
-    if sess.cache_controller.cached_request(resp.request):
-        print("Cached!")
-    else:
-        print("Not cached :(")
-
-
-if __name__ == "__main__":
-    main()
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/adapter.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/adapter.py
deleted file mode 100644
index 780eb28..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/adapter.py
+++ /dev/null
@@ -1,133 +0,0 @@
-import types
-import functools
-import zlib
-
-from pip._vendor.requests.adapters import HTTPAdapter
-
-from .controller import CacheController
-from .cache import DictCache
-from .filewrapper import CallbackFileWrapper
-
-
-class CacheControlAdapter(HTTPAdapter):
-    invalidating_methods = {"PUT", "DELETE"}
-
-    def __init__(
-        self,
-        cache=None,
-        cache_etags=True,
-        controller_class=None,
-        serializer=None,
-        heuristic=None,
-        cacheable_methods=None,
-        *args,
-        **kw
-    ):
-        super(CacheControlAdapter, self).__init__(*args, **kw)
-        self.cache = cache or DictCache()
-        self.heuristic = heuristic
-        self.cacheable_methods = cacheable_methods or ("GET",)
-
-        controller_factory = controller_class or CacheController
-        self.controller = controller_factory(
-            self.cache, cache_etags=cache_etags, serializer=serializer
-        )
-
-    def send(self, request, cacheable_methods=None, **kw):
-        """
-        Send a request. Use the request information to see if it
-        exists in the cache and cache the response if we need to and can.
-        """
-        cacheable = cacheable_methods or self.cacheable_methods
-        if request.method in cacheable:
-            try:
-                cached_response = self.controller.cached_request(request)
-            except zlib.error:
-                cached_response = None
-            if cached_response:
-                return self.build_response(request, cached_response, from_cache=True)
-
-            # check for etags and add headers if appropriate
-            request.headers.update(self.controller.conditional_headers(request))
-
-        resp = super(CacheControlAdapter, self).send(request, **kw)
-
-        return resp
-
-    def build_response(
-        self, request, response, from_cache=False, cacheable_methods=None
-    ):
-        """
-        Build a response by making a request or using the cache.
-
-        This will end up calling send and returning a potentially
-        cached response
-        """
-        cacheable = cacheable_methods or self.cacheable_methods
-        if not from_cache and request.method in cacheable:
-            # Check for any heuristics that might update headers
-            # before trying to cache.
-            if self.heuristic:
-                response = self.heuristic.apply(response)
-
-            # apply any expiration heuristics
-            if response.status == 304:
-                # We must have sent an ETag request. This could mean
-                # that our cached copy has expired already or that we
-                # simply have an etag. In either case, we want to try to
-                # update the cache with the response we got back.
-                cached_response = self.controller.update_cached_response(
-                    request, response
-                )
-
-                if cached_response is not response:
-                    from_cache = True
-
-                # We are done with the server response, read a
-                # possible response body (compliant servers will
-                # not return one, but we cannot be 100% sure) and
-                # release the connection back to the pool.
-                response.read(decode_content=False)
-                response.release_conn()
-
-                response = cached_response
-
-            # We always cache the 301 responses
-            elif response.status == 301:
-                self.controller.cache_response(request, response)
-            else:
-                # Wrap the response file with a wrapper that will cache the
-                #   response when the stream has been consumed.
-                response._fp = CallbackFileWrapper(
-                    response._fp,
-                    functools.partial(
-                        self.controller.cache_response, request, response
-                    ),
-                )
-                if response.chunked:
-                    super_update_chunk_length = response._update_chunk_length
-
-                    def _update_chunk_length(self):
-                        super_update_chunk_length()
-                        if self.chunk_left == 0:
-                            self._fp._close()
-
-                    response._update_chunk_length = types.MethodType(
-                        _update_chunk_length, response
-                    )
-
-        resp = super(CacheControlAdapter, self).build_response(request, response)
-
-        # See if we should invalidate the cache.
-        if request.method in self.invalidating_methods and resp.ok:
-            cache_url = self.controller.cache_url(request.url)
-            self.cache.delete(cache_url)
-
-        # Give the request a from_cache attr to let people use it
-        resp.from_cache = from_cache
-
-        return resp
-
-    def close(self):
-        self.cache.close()
-        super(CacheControlAdapter, self).close()
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/cache.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/cache.py
deleted file mode 100644
index 94e0773..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/cache.py
+++ /dev/null
@@ -1,39 +0,0 @@
-"""
-The cache object API for implementing caches. The default is a thread
-safe in-memory dictionary.
-"""
-from threading import Lock
-
-
-class BaseCache(object):
-
-    def get(self, key):
-        raise NotImplementedError()
-
-    def set(self, key, value):
-        raise NotImplementedError()
-
-    def delete(self, key):
-        raise NotImplementedError()
-
-    def close(self):
-        pass
-
-
-class DictCache(BaseCache):
-
-    def __init__(self, init_dict=None):
-        self.lock = Lock()
-        self.data = init_dict or {}
-
-    def get(self, key):
-        return self.data.get(key, None)
-
-    def set(self, key, value):
-        with self.lock:
-            self.data.update({key: value})
-
-    def delete(self, key):
-        with self.lock:
-            if key in self.data:
-                self.data.pop(key)
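
Because BaseCache above defines the full backend contract (get/set/delete plus an optional close), alternative backends are small to write. The NullCache below is an illustrative, made-up example that stores nothing, which effectively disables caching while keeping the interface intact; it is not part of cachecontrol.

from pip._vendor.cachecontrol.cache import BaseCache

class NullCache(BaseCache):
    """A cache backend that never stores anything (illustrative only)."""

    def get(self, key):
        # Always report a miss.
        return None

    def set(self, key, value):
        # Discard the value.
        pass

    def delete(self, key):
        # Nothing is ever stored, so nothing to delete.
        pass
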
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__init__.py
deleted file mode 100644
index 0e1658f..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from .file_cache import FileCache  # noqa
-from .redis_cache import RedisCache  # noqa
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py
deleted file mode 100644
index 607b945..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py
+++ /dev/null
@@ -1,146 +0,0 @@
-import hashlib
-import os
-from textwrap import dedent
-
-from ..cache import BaseCache
-from ..controller import CacheController
-
-try:
-    FileNotFoundError
-except NameError:
-    # py2.X
-    FileNotFoundError = (IOError, OSError)
-
-
-def _secure_open_write(filename, fmode):
-    # We only want to write to this file, so open it in write only mode
-    flags = os.O_WRONLY
-
-    # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only
-    #  will open *new* files.
-    # We specify this because we want to ensure that the mode we pass is the
-    # mode of the file.
-    flags |= os.O_CREAT | os.O_EXCL
-
-    # Do not follow symlinks to prevent someone from making a symlink that
-    # we follow and insecurely open a cache file.
-    if hasattr(os, "O_NOFOLLOW"):
-        flags |= os.O_NOFOLLOW
-
-    # On Windows we'll mark this file as binary
-    if hasattr(os, "O_BINARY"):
-        flags |= os.O_BINARY
-
-    # Before we open our file, we want to delete any existing file that is
-    # there
-    try:
-        os.remove(filename)
-    except (IOError, OSError):
-        # The file must not exist already, so we can just skip ahead to opening
-        pass
-
-    # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a
-    # race condition happens between the os.remove and this line, that an
-    # error will be raised. Because we utilize a lockfile this should only
-    # happen if someone is attempting to attack us.
-    fd = os.open(filename, flags, fmode)
-    try:
-        return os.fdopen(fd, "wb")
-
-    except:
-        # An error occurred wrapping our FD in a file object
-        os.close(fd)
-        raise
-
-
-class FileCache(BaseCache):
-
-    def __init__(
-        self,
-        directory,
-        forever=False,
-        filemode=0o0600,
-        dirmode=0o0700,
-        use_dir_lock=None,
-        lock_class=None,
-    ):
-
-        if use_dir_lock is not None and lock_class is not None:
-            raise ValueError("Cannot use use_dir_lock and lock_class together")
-
-        try:
-            from lockfile import LockFile
-            from lockfile.mkdirlockfile import MkdirLockFile
-        except ImportError:
-            notice = dedent(
-                """
-            NOTE: In order to use the FileCache you must have
-            lockfile installed. You can install it via pip:
-              pip install lockfile
-            """
-            )
-            raise ImportError(notice)
-
-        else:
-            if use_dir_lock:
-                lock_class = MkdirLockFile
-
-            elif lock_class is None:
-                lock_class = LockFile
-
-        self.directory = directory
-        self.forever = forever
-        self.filemode = filemode
-        self.dirmode = dirmode
-        self.lock_class = lock_class
-
-    @staticmethod
-    def encode(x):
-        return hashlib.sha224(x.encode()).hexdigest()
-
-    def _fn(self, name):
-        # NOTE: This method should not change as some may depend on it.
-        #       See: https://github.com/ionrock/cachecontrol/issues/63
-        hashed = self.encode(name)
-        parts = list(hashed[:5]) + [hashed]
-        return os.path.join(self.directory, *parts)
-
-    def get(self, key):
-        name = self._fn(key)
-        try:
-            with open(name, "rb") as fh:
-                return fh.read()
-
-        except FileNotFoundError:
-            return None
-
-    def set(self, key, value):
-        name = self._fn(key)
-
-        # Make sure the directory exists
-        try:
-            os.makedirs(os.path.dirname(name), self.dirmode)
-        except (IOError, OSError):
-            pass
-
-        with self.lock_class(name) as lock:
-            # Write our actual file
-            with _secure_open_write(lock.path, self.filemode) as fh:
-                fh.write(value)
-
-    def delete(self, key):
-        name = self._fn(key)
-        if not self.forever:
-            try:
-                os.remove(name)
-            except FileNotFoundError:
-                pass
-
-
-def url_to_file_path(url, filecache):
-    """Return the file cache path based on the URL.
-
-    This does not ensure the file exists!
-    """
-    key = CacheController.cache_url(url)
-    return filecache._fn(key)
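
A hedged sketch of wiring FileCache into a session via CacheControlAdapter, persisting responses on disk. The cache directory path is illustrative, and, as the ImportError notice above explains, the third-party lockfile package must be installed for FileCache to work.

import os

from pip._vendor import requests
from pip._vendor.cachecontrol.adapter import CacheControlAdapter
from pip._vendor.cachecontrol.caches.file_cache import FileCache

cache_dir = os.path.expanduser("~/.cache/demo-http")  # illustrative location
adapter = CacheControlAdapter(cache=FileCache(cache_dir))

sess = requests.Session()
sess.mount("https://", adapter)  # cacheable GETs are now written under cache_dir
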
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py
deleted file mode 100644
index ed705ce..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from __future__ import division
-
-from datetime import datetime
-from pip._vendor.cachecontrol.cache import BaseCache
-
-
-class RedisCache(BaseCache):
-
-    def __init__(self, conn):
-        self.conn = conn
-
-    def get(self, key):
-        return self.conn.get(key)
-
-    def set(self, key, value, expires=None):
-        if not expires:
-            self.conn.set(key, value)
-        else:
-            expires = expires - datetime.utcnow()
-            self.conn.setex(key, int(expires.total_seconds()), value)
-
-    def delete(self, key):
-        self.conn.delete(key)
-
-    def clear(self):
-        """Helper for clearing all the keys in a database. Use with
-        caution!"""
-        for key in self.conn.keys():
-            self.conn.delete(key)
-
-    def close(self):
-        """Redis uses connection pooling, no need to close the connection."""
-        pass
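
A hedged sketch of backing the cache with Redis. It assumes the third-party redis client is installed and that a server is reachable at the given URL; redis.from_url is that client's documented constructor, and the key and value shown are purely illustrative.

import redis  # not vendored by pip; shown only for illustration

from pip._vendor.cachecontrol.caches.redis_cache import RedisCache

conn = redis.from_url("redis://localhost:6379/0")
cache = RedisCache(conn)
cache.set("example-key", b"example-value")   # stored without an expiry
print(cache.get("example-key"))              # b'example-value'
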
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/compat.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/compat.py
deleted file mode 100644
index 33b5aed..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/compat.py
+++ /dev/null
@@ -1,29 +0,0 @@
-try:
-    from urllib.parse import urljoin
-except ImportError:
-    from urlparse import urljoin
-
-
-try:
-    import cPickle as pickle
-except ImportError:
-    import pickle
-
-
-# Handle the case where the requests module has been patched to not have
-# urllib3 bundled as part of its source.
-try:
-    from pip._vendor.requests.packages.urllib3.response import HTTPResponse
-except ImportError:
-    from pip._vendor.urllib3.response import HTTPResponse
-
-try:
-    from pip._vendor.requests.packages.urllib3.util import is_fp_closed
-except ImportError:
-    from pip._vendor.urllib3.util import is_fp_closed
-
-# Replicate some six behaviour
-try:
-    text_type = unicode
-except NameError:
-    text_type = str
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/controller.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/controller.py
deleted file mode 100644
index 1b2b943..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/controller.py
+++ /dev/null
@@ -1,367 +0,0 @@
-"""
-The httplib2 algorithms ported for use with requests.
-"""
-import logging
-import re
-import calendar
-import time
-from email.utils import parsedate_tz
-
-from pip._vendor.requests.structures import CaseInsensitiveDict
-
-from .cache import DictCache
-from .serialize import Serializer
-
-
-logger = logging.getLogger(__name__)
-
-URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
-
-
-def parse_uri(uri):
-    """Parses a URI using the regex given in Appendix B of RFC 3986.
-
-        (scheme, authority, path, query, fragment) = parse_uri(uri)
-    """
-    groups = URI.match(uri).groups()
-    return (groups[1], groups[3], groups[4], groups[6], groups[8])
-
-
-class CacheController(object):
-    """An interface to see if a request should be cached or not.
-    """
-
-    def __init__(
-        self, cache=None, cache_etags=True, serializer=None, status_codes=None
-    ):
-        self.cache = cache or DictCache()
-        self.cache_etags = cache_etags
-        self.serializer = serializer or Serializer()
-        self.cacheable_status_codes = status_codes or (200, 203, 300, 301)
-
-    @classmethod
-    def _urlnorm(cls, uri):
-        """Normalize the URL to create a safe key for the cache"""
-        (scheme, authority, path, query, fragment) = parse_uri(uri)
-        if not scheme or not authority:
-            raise Exception("Only absolute URIs are allowed. uri = %s" % uri)
-
-        scheme = scheme.lower()
-        authority = authority.lower()
-
-        if not path:
-            path = "/"
-
-        # Could do syntax based normalization of the URI before
-        # computing the digest. See Section 6.2.2 of Std 66.
-        request_uri = query and "?".join([path, query]) or path
-        defrag_uri = scheme + "://" + authority + request_uri
-
-        return defrag_uri
-
-    @classmethod
-    def cache_url(cls, uri):
-        return cls._urlnorm(uri)
-
-    def parse_cache_control(self, headers):
-        known_directives = {
-            # https://tools.ietf.org/html/rfc7234#section-5.2
-            "max-age": (int, True),
-            "max-stale": (int, False),
-            "min-fresh": (int, True),
-            "no-cache": (None, False),
-            "no-store": (None, False),
-            "no-transform": (None, False),
-            "only-if-cached": (None, False),
-            "must-revalidate": (None, False),
-            "public": (None, False),
-            "private": (None, False),
-            "proxy-revalidate": (None, False),
-            "s-maxage": (int, True),
-        }
-
-        cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))
-
-        retval = {}
-
-        for cc_directive in cc_headers.split(","):
-            if not cc_directive.strip():
-                continue
-
-            parts = cc_directive.split("=", 1)
-            directive = parts[0].strip()
-
-            try:
-                typ, required = known_directives[directive]
-            except KeyError:
-                logger.debug("Ignoring unknown cache-control directive: %s", directive)
-                continue
-
-            if not typ or not required:
-                retval[directive] = None
-            if typ:
-                try:
-                    retval[directive] = typ(parts[1].strip())
-                except IndexError:
-                    if required:
-                        logger.debug(
-                            "Missing value for cache-control " "directive: %s",
-                            directive,
-                        )
-                except ValueError:
-                    logger.debug(
-                        "Invalid value for cache-control directive " "%s, must be %s",
-                        directive,
-                        typ.__name__,
-                    )
-
-        return retval
-
-    def cached_request(self, request):
-        """
-        Return a cached response if it exists in the cache, otherwise
-        return False.
-        """
-        cache_url = self.cache_url(request.url)
-        logger.debug('Looking up "%s" in the cache', cache_url)
-        cc = self.parse_cache_control(request.headers)
-
-        # Bail out if the request insists on fresh data
-        if "no-cache" in cc:
-            logger.debug('Request header has "no-cache", cache bypassed')
-            return False
-
-        if "max-age" in cc and cc["max-age"] == 0:
-            logger.debug('Request header has "max_age" as 0, cache bypassed')
-            return False
-
-        # Request allows serving from the cache, let's see if we find something
-        cache_data = self.cache.get(cache_url)
-        if cache_data is None:
-            logger.debug("No cache entry available")
-            return False
-
-        # Check whether it can be deserialized
-        resp = self.serializer.loads(request, cache_data)
-        if not resp:
-            logger.warning("Cache entry deserialization failed, entry ignored")
-            return False
-
-        # If we have a cached 301, return it immediately. We don't
-        # need to test our response for other headers b/c it is
-        # intrinsically "cacheable" as it is Permanent.
-        # See:
-        #   https://tools.ietf.org/html/rfc7231#section-6.4.2
-        #
-        # Client can try to refresh the value by repeating the request
-        # with cache busting headers as usual (ie no-cache).
-        if resp.status == 301:
-            msg = (
-                'Returning cached "301 Moved Permanently" response '
-                "(ignoring date and etag information)"
-            )
-            logger.debug(msg)
-            return resp
-
-        headers = CaseInsensitiveDict(resp.headers)
-        if not headers or "date" not in headers:
-            if "etag" not in headers:
-                # Without date or etag, the cached response can never be used
-                # and should be deleted.
-                logger.debug("Purging cached response: no date or etag")
-                self.cache.delete(cache_url)
-            logger.debug("Ignoring cached response: no date")
-            return False
-
-        now = time.time()
-        date = calendar.timegm(parsedate_tz(headers["date"]))
-        current_age = max(0, now - date)
-        logger.debug("Current age based on date: %i", current_age)
-
-        # TODO: There is an assumption that the result will be a
-        #       urllib3 response object. This may not be best since we
-        #       could probably avoid instantiating or constructing the
-        #       response until we know we need it.
-        resp_cc = self.parse_cache_control(headers)
-
-        # determine freshness
-        freshness_lifetime = 0
-
-        # Check the max-age pragma in the cache control header
-        if "max-age" in resp_cc:
-            freshness_lifetime = resp_cc["max-age"]
-            logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)
-
-        # If there isn't a max-age, check for an expires header
-        elif "expires" in headers:
-            expires = parsedate_tz(headers["expires"])
-            if expires is not None:
-                expire_time = calendar.timegm(expires) - date
-                freshness_lifetime = max(0, expire_time)
-                logger.debug("Freshness lifetime from expires: %i", freshness_lifetime)
-
-        # Determine if we are setting freshness limit in the
-        # request. Note, this overrides what was in the response.
-        if "max-age" in cc:
-            freshness_lifetime = cc["max-age"]
-            logger.debug(
-                "Freshness lifetime from request max-age: %i", freshness_lifetime
-            )
-
-        if "min-fresh" in cc:
-            min_fresh = cc["min-fresh"]
-            # adjust our current age by our min fresh
-            current_age += min_fresh
-            logger.debug("Adjusted current age from min-fresh: %i", current_age)
-
-        # Return entry if it is fresh enough
-        if freshness_lifetime > current_age:
-            logger.debug('The response is "fresh", returning cached response')
-            logger.debug("%i > %i", freshness_lifetime, current_age)
-            return resp
-
-        # we're not fresh. If we don't have an Etag, clear it out
-        if "etag" not in headers:
-            logger.debug('The cached response is "stale" with no etag, purging')
-            self.cache.delete(cache_url)
-
-        # return the original handler
-        return False
-
-    def conditional_headers(self, request):
-        cache_url = self.cache_url(request.url)
-        resp = self.serializer.loads(request, self.cache.get(cache_url))
-        new_headers = {}
-
-        if resp:
-            headers = CaseInsensitiveDict(resp.headers)
-
-            if "etag" in headers:
-                new_headers["If-None-Match"] = headers["ETag"]
-
-            if "last-modified" in headers:
-                new_headers["If-Modified-Since"] = headers["Last-Modified"]
-
-        return new_headers
-
-    def cache_response(self, request, response, body=None, status_codes=None):
-        """
-        Algorithm for caching requests.
-
-        This assumes a requests Response object.
-        """
-        # From httplib2: Don't cache 206's since we aren't going to
-        #                handle byte range requests
-        cacheable_status_codes = status_codes or self.cacheable_status_codes
-        if response.status not in cacheable_status_codes:
-            logger.debug(
-                "Status code %s not in %s", response.status, cacheable_status_codes
-            )
-            return
-
-        response_headers = CaseInsensitiveDict(response.headers)
-
-        # If we've been given a body, the response has a Content-Length, and
-        # that Content-Length is valid, then we can check whether the body
-        # we've been given matches the expected size; if it doesn't, we'll
-        # just skip trying to cache it.
-        if (
-            body is not None
-            and "content-length" in response_headers
-            and response_headers["content-length"].isdigit()
-            and int(response_headers["content-length"]) != len(body)
-        ):
-            return
-
-        cc_req = self.parse_cache_control(request.headers)
-        cc = self.parse_cache_control(response_headers)
-
-        cache_url = self.cache_url(request.url)
-        logger.debug('Updating cache with response from "%s"', cache_url)
-
-        # Delete it from the cache if we happen to have it stored there
-        no_store = False
-        if "no-store" in cc:
-            no_store = True
-            logger.debug('Response header has "no-store"')
-        if "no-store" in cc_req:
-            no_store = True
-            logger.debug('Request header has "no-store"')
-        if no_store and self.cache.get(cache_url):
-            logger.debug('Purging existing cache entry to honor "no-store"')
-            self.cache.delete(cache_url)
-        if no_store:
-            return
-
-        # If we've been given an etag, then keep the response
-        if self.cache_etags and "etag" in response_headers:
-            logger.debug("Caching due to etag")
-            self.cache.set(
-                cache_url, self.serializer.dumps(request, response, body=body)
-            )
-
-        # Add any 301s to the cache. We do this before looking at
-        # the Date headers.
-        elif response.status == 301:
-            logger.debug("Caching permanent redirect")
-            self.cache.set(cache_url, self.serializer.dumps(request, response))
-
-        # Add to the cache if the response headers demand it. If there
-        # is no date header then we can't do anything about expiring
-        # the cache.
-        elif "date" in response_headers:
-            # cache when there is a max-age > 0
-            if "max-age" in cc and cc["max-age"] > 0:
-                logger.debug("Caching b/c date exists and max-age > 0")
-                self.cache.set(
-                    cache_url, self.serializer.dumps(request, response, body=body)
-                )
-
-            # If the request can expire, it means we should cache it
-            # in the meantime.
-            elif "expires" in response_headers:
-                if response_headers["expires"]:
-                    logger.debug("Caching b/c of expires header")
-                    self.cache.set(
-                        cache_url, self.serializer.dumps(request, response, body=body)
-                    )
-
-    def update_cached_response(self, request, response):
-        """On a 304 we will get a new set of headers that we want to
-        update our cached value with, assuming we have one.
-
-        This should only ever be called when we've sent an ETag and
-        gotten a 304 as the response.
-        """
-        cache_url = self.cache_url(request.url)
-
-        cached_response = self.serializer.loads(request, self.cache.get(cache_url))
-
-        if not cached_response:
-            # we didn't have a cached response
-            return response
-
-        # Let's update our headers with the headers from the new request:
-        # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
-        #
-        # The server isn't supposed to send headers that would make
-        # the cached body invalid. But... just in case, we'll be sure
-        # to strip out ones we know might be problematic due to
-        # typical assumptions.
-        excluded_headers = ["content-length"]
-
-        cached_response.headers.update(
-            dict(
-                (k, v)
-                for k, v in response.headers.items()
-                if k.lower() not in excluded_headers
-            )
-        )
-
-        # we want a 200 b/c we have content via the cache
-        cached_response.status = 200
-
-        # update our cache
-        self.cache.set(cache_url, self.serializer.dumps(request, cached_response))
-
-        return cached_response
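
The deleted `cache_response` above encodes the storage rules: honour "no-store" from either side, prefer keeping responses that carry an ETag, always keep 301s, and otherwise require a Date header plus a positive max-age or an Expires header. A minimal standalone sketch of that decision order, using plain dicts in place of the real request/response objects (the status-code set is a placeholder, not the controller's actual default):

# Illustration only: mirrors the decision order in cache_response() above.
def should_cache(status, request_cc, response_cc, response_headers,
                 cacheable_statuses=frozenset({200, 301})):
    if status not in cacheable_statuses:
        return False
    if "no-store" in request_cc or "no-store" in response_cc:
        return False            # honour no-store from either side
    if "etag" in response_headers:
        return True             # keep it so we can revalidate with If-None-Match
    if status == 301:
        return True             # permanent redirects are always kept
    if "date" in response_headers:
        if response_cc.get("max-age", 0) > 0:
            return True         # explicit freshness lifetime
        if response_headers.get("expires"):
            return True         # fall back on an Expires header
    return False


print(should_cache(200, {}, {"max-age": 3600},
                   {"date": "Mon, 01 Jan 2018 00:00:00 GMT"}))       # True
print(should_cache(200, {"no-store": None}, {}, {"etag": '"abc"'}))  # False
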
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/filewrapper.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/filewrapper.py
deleted file mode 100644
index 30ed4c5..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/filewrapper.py
+++ /dev/null
@@ -1,80 +0,0 @@
-from io import BytesIO
-
-
-class CallbackFileWrapper(object):
-    """
-    Small wrapper around a fp object which will tee everything read into a
-    buffer, and when that file is closed it will execute a callback with the
-    contents of that buffer.
-
-    All attributes are proxied to the underlying file object.
-
-    This class uses members with a double underscore (__) leading prefix so as
-    not to accidentally shadow an attribute.
-    """
-
-    def __init__(self, fp, callback):
-        self.__buf = BytesIO()
-        self.__fp = fp
-        self.__callback = callback
-
-    def __getattr__(self, name):
-        # The vagaries of garbage collection mean that self.__fp is
-        # not always set.  Using __getattribute__ with the mangled private
-        # name[0] lets us look up the attribute value and raise an
-        # AttributeError when it doesn't exist. This stops getattr from
-        # recursing infinitely in the case where self.__fp hasn't been
-        # set.
-        #
-        # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
-        fp = self.__getattribute__("_CallbackFileWrapper__fp")
-        return getattr(fp, name)
-
-    def __is_fp_closed(self):
-        try:
-            return self.__fp.fp is None
-
-        except AttributeError:
-            pass
-
-        try:
-            return self.__fp.closed
-
-        except AttributeError:
-            pass
-
-        # We just don't cache it then.
-        # TODO: Add some logging here...
-        return False
-
-    def _close(self):
-        if self.__callback:
-            self.__callback(self.__buf.getvalue())
-
-        # We assign this to None here, because otherwise we can get into
-        # really tricky problems where the CPython interpreter deadlocks
-        # because the callback is holding a reference to something which
-        # has a __del__ method. Setting this to None breaks the cycle
-        # and allows the garbage collector to do its thing normally.
-        self.__callback = None
-
-    def read(self, amt=None):
-        data = self.__fp.read(amt)
-        self.__buf.write(data)
-        if self.__is_fp_closed():
-            self._close()
-
-        return data
-
-    def _safe_read(self, amt):
-        data = self.__fp._safe_read(amt)
-        if amt == 2 and data == b"\r\n":
-            # urllib executes this read to toss the CRLF at the end
-            # of the chunk.
-            return data
-
-        self.__buf.write(data)
-        if self.__is_fp_closed():
-            self._close()
-
-        return data
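
The deleted `CallbackFileWrapper` tees everything read through it into a buffer and fires a callback with the buffered bytes once the underlying file is exhausted (that is how the response body ends up being cached as it streams to the caller). A self-contained sketch of the same pattern, using a hypothetical `TeeReader` rather than the vendored class:

import io


class TeeReader:
    """Hypothetical illustration of the tee-and-callback pattern used above."""

    def __init__(self, fp, callback):
        self._fp = fp
        self._buf = io.BytesIO()
        self._callback = callback

    def read(self, amt=None):
        data = self._fp.read() if amt is None else self._fp.read(amt)
        self._buf.write(data)
        if not data and self._callback is not None:
            # Underlying stream is exhausted: hand the full buffer to the
            # callback exactly once, then drop the reference.
            self._callback(self._buf.getvalue())
            self._callback = None
        return data


reader = TeeReader(io.BytesIO(b"cached body"),
                   callback=lambda body: print("cached:", body))
while reader.read(4):
    pass    # prints: cached: b'cached body'
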
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/heuristics.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/heuristics.py
deleted file mode 100644
index 6c0e979..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/heuristics.py
+++ /dev/null
@@ -1,135 +0,0 @@
-import calendar
-import time
-
-from email.utils import formatdate, parsedate, parsedate_tz
-
-from datetime import datetime, timedelta
-
-TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
-
-
-def expire_after(delta, date=None):
-    date = date or datetime.utcnow()
-    return date + delta
-
-
-def datetime_to_header(dt):
-    return formatdate(calendar.timegm(dt.timetuple()))
-
-
-class BaseHeuristic(object):
-
-    def warning(self, response):
-        """
-        Return a valid 1xx warning header value describing the cache
-        adjustments.
-
-        The response is provided to allow warnings like 113
-        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
-        to explicitly say the response is over 24 hours old.
-        """
-        return '110 - "Response is Stale"'
-
-    def update_headers(self, response):
-        """Update the response headers with any new headers.
-
-        NOTE: This SHOULD always include some Warning header to
-              signify that the response was cached by the client, not
-              by way of the provided headers.
-        """
-        return {}
-
-    def apply(self, response):
-        updated_headers = self.update_headers(response)
-
-        if updated_headers:
-            response.headers.update(updated_headers)
-            warning_header_value = self.warning(response)
-            if warning_header_value is not None:
-                response.headers.update({"Warning": warning_header_value})
-
-        return response
-
-
-class OneDayCache(BaseHeuristic):
-    """
-    Cache the response by providing an Expires header 1 day in the
-    future.
-    """
-
-    def update_headers(self, response):
-        headers = {}
-
-        if "expires" not in response.headers:
-            date = parsedate(response.headers["date"])
-            expires = expire_after(timedelta(days=1), date=datetime(*date[:6]))
-            headers["expires"] = datetime_to_header(expires)
-            headers["cache-control"] = "public"
-        return headers
-
-
-class ExpiresAfter(BaseHeuristic):
-    """
-    Cache **all** requests for a defined time period.
-    """
-
-    def __init__(self, **kw):
-        self.delta = timedelta(**kw)
-
-    def update_headers(self, response):
-        expires = expire_after(self.delta)
-        return {"expires": datetime_to_header(expires), "cache-control": "public"}
-
-    def warning(self, response):
-        tmpl = "110 - Automatically cached for %s. Response might be stale"
-        return tmpl % self.delta
-
-
-class LastModified(BaseHeuristic):
-    """
-    If there is no Expires header already, fall back on Last-Modified
-    using the heuristic from
-    http://tools.ietf.org/html/rfc7234#section-4.2.2
-    to calculate a reasonable value.
-
-    Firefox also does something like this per
-    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
-    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
-    Unlike Mozilla, we limit this to 24 hours.
-    """
-    cacheable_by_default_statuses = {
-        200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
-    }
-
-    def update_headers(self, resp):
-        headers = resp.headers
-
-        if "expires" in headers:
-            return {}
-
-        if "cache-control" in headers and headers["cache-control"] != "public":
-            return {}
-
-        if resp.status not in self.cacheable_by_default_statuses:
-            return {}
-
-        if "date" not in headers or "last-modified" not in headers:
-            return {}
-
-        date = calendar.timegm(parsedate_tz(headers["date"]))
-        last_modified = parsedate(headers["last-modified"])
-        if date is None or last_modified is None:
-            return {}
-
-        now = time.time()
-        current_age = max(0, now - date)
-        delta = date - calendar.timegm(last_modified)
-        freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
-        if freshness_lifetime <= current_age:
-            return {}
-
-        expires = date + freshness_lifetime
-        return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))}
-
-    def warning(self, resp):
-        return None
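
The `BaseHeuristic` contract shown above is small: `update_headers()` returns replacement headers to apply to the response, and `warning()` returns a Warning header value (or None). A minimal subclass following that contract; the import path assumes the standalone `cachecontrol` distribution rather than the vendored `pip._vendor` copy, which is not meant to be imported directly:

import calendar
from datetime import datetime, timedelta
from email.utils import formatdate

from cachecontrol.heuristics import BaseHeuristic


class OneHourCache(BaseHeuristic):
    """Treat any response handed to it as fresh for one hour."""

    def update_headers(self, response):
        expires = datetime.utcnow() + timedelta(hours=1)
        return {
            "expires": formatdate(calendar.timegm(expires.timetuple())),
            "cache-control": "public",
        }

    def warning(self, response):
        return '110 - "Response is Stale"'
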
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/serialize.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/serialize.py
deleted file mode 100644
index ec43ff2..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/serialize.py
+++ /dev/null
@@ -1,186 +0,0 @@
-import base64
-import io
-import json
-import zlib
-
-from pip._vendor import msgpack
-from pip._vendor.requests.structures import CaseInsensitiveDict
-
-from .compat import HTTPResponse, pickle, text_type
-
-
-def _b64_decode_bytes(b):
-    return base64.b64decode(b.encode("ascii"))
-
-
-def _b64_decode_str(s):
-    return _b64_decode_bytes(s).decode("utf8")
-
-
-class Serializer(object):
-
-    def dumps(self, request, response, body=None):
-        response_headers = CaseInsensitiveDict(response.headers)
-
-        if body is None:
-            body = response.read(decode_content=False)
-
-            # NOTE: 99% sure this is dead code. I'm only leaving it
-            #       here b/c I don't have a test yet to prove
-            #       it. Basically, before using
-            #       `cachecontrol.filewrapper.CallbackFileWrapper`,
-            #       this made an effort to reset the file handle. The
-            #       `CallbackFileWrapper` short-circuits this code by
-            #       setting the body as the content is consumed, the
-            #       result being that a `body` argument is *always*
-            #       passed into cache_response, and in turn,
-            #       `Serializer.dumps`.
-            response._fp = io.BytesIO(body)
-
-        # NOTE: This is all a bit weird, but it's really important that on
-        #       Python 2.x these objects are unicode and not str, even when
-        #       they contain only ascii. The problem here is that msgpack
-        #       understands the difference between unicode and bytes and we
-        #       have it set to differentiate between them, however Python 2
-        #       doesn't know the difference. Forcing these to unicode will be
-        #       enough to have msgpack know the difference.
-        data = {
-            u"response": {
-                u"body": body,
-                u"headers": dict(
-                    (text_type(k), text_type(v)) for k, v in response.headers.items()
-                ),
-                u"status": response.status,
-                u"version": response.version,
-                u"reason": text_type(response.reason),
-                u"strict": response.strict,
-                u"decode_content": response.decode_content,
-            }
-        }
-
-        # Construct our vary headers
-        data[u"vary"] = {}
-        if u"vary" in response_headers:
-            varied_headers = response_headers[u"vary"].split(",")
-            for header in varied_headers:
-                header = text_type(header).strip()
-                header_value = request.headers.get(header, None)
-                if header_value is not None:
-                    header_value = text_type(header_value)
-                data[u"vary"][header] = header_value
-
-        return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)])
-
-    def loads(self, request, data):
-        # Short circuit if we've been given an empty set of data
-        if not data:
-            return
-
-        # Determine what version of the serializer the data was serialized
-        # with
-        try:
-            ver, data = data.split(b",", 1)
-        except ValueError:
-            ver = b"cc=0"
-
-        # Make sure that our "ver" is actually a version and isn't a false
-        # positive from a comma simply appearing in the data stream.
-        if ver[:3] != b"cc=":
-            data = ver + data
-            ver = b"cc=0"
-
-        # Get the version number out of the cc=N
-        ver = ver.split(b"=", 1)[-1].decode("ascii")
-
-        # Dispatch to the actual load method for the given version
-        try:
-            return getattr(self, "_loads_v{}".format(ver))(request, data)
-
-        except AttributeError:
-            # This is a version we don't have a loads function for, so we'll
-            # just treat it as a miss and return None
-            return
-
-    def prepare_response(self, request, cached):
-        """Verify our vary headers match and construct a real urllib3
-        HTTPResponse object.
-        """
-        # Special case the '*' Vary value as it means we cannot actually
-        # determine if the cached response is suitable for this request.
-        if "*" in cached.get("vary", {}):
-            return
-
-        # Ensure that the Vary headers for the cached response match our
-        # request
-        for header, value in cached.get("vary", {}).items():
-            if request.headers.get(header, None) != value:
-                return
-
-        body_raw = cached["response"].pop("body")
-
-        headers = CaseInsensitiveDict(data=cached["response"]["headers"])
-        if headers.get("transfer-encoding", "") == "chunked":
-            headers.pop("transfer-encoding")
-
-        cached["response"]["headers"] = headers
-
-        try:
-            body = io.BytesIO(body_raw)
-        except TypeError:
-            # This can happen if cachecontrol serialized to v1 format (pickle)
-            # using Python 2. A Python 2 str(byte string) will be unpickled as
-            # a Python 3 str (unicode string), which will cause the above to
-            # fail with:
-            #
-            #     TypeError: 'str' does not support the buffer interface
-            body = io.BytesIO(body_raw.encode("utf8"))
-
-        return HTTPResponse(body=body, preload_content=False, **cached["response"])
-
-    def _loads_v0(self, request, data):
-        # The original legacy cache data. This doesn't contain enough
-        # information to construct everything we need, so we'll treat this as
-        # a miss.
-        return
-
-    def _loads_v1(self, request, data):
-        try:
-            cached = pickle.loads(data)
-        except ValueError:
-            return
-
-        return self.prepare_response(request, cached)
-
-    def _loads_v2(self, request, data):
-        try:
-            cached = json.loads(zlib.decompress(data).decode("utf8"))
-        except (ValueError, zlib.error):
-            return
-
-        # We need to decode the items that we've base64 encoded
-        cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"])
-        cached["response"]["headers"] = dict(
-            (_b64_decode_str(k), _b64_decode_str(v))
-            for k, v in cached["response"]["headers"].items()
-        )
-        cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"])
-        cached["vary"] = dict(
-            (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
-            for k, v in cached["vary"].items()
-        )
-
-        return self.prepare_response(request, cached)
-
-    def _loads_v3(self, request, data):
-        # Due to Python 2 encoding issues, it's impossible to know for sure
-        # exactly how to load v3 entries, thus we'll treat these as a miss so
-        # that they get rewritten out as v4 entries.
-        return
-
-    def _loads_v4(self, request, data):
-        try:
-            cached = msgpack.loads(data, encoding="utf-8")
-        except ValueError:
-            return
-
-        return self.prepare_response(request, cached)
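
The serializer above frames every cache entry as the ASCII prefix `cc=<N>,` followed by the payload, and `loads()` dispatches on `<N>`. A small sketch of that framing, with a placeholder standing in for the real msgpack payload:

# Illustration of the "cc=<N>," framing used by Serializer.loads() above.
payload = b"<msgpack bytes would go here>"          # placeholder, not real msgpack data
blob = b",".join([b"cc=4", payload])                # shape of what dumps() produces

ver, rest = blob.split(b",", 1)                     # loads() splits off the prefix...
assert ver.startswith(b"cc=")
version = ver.split(b"=", 1)[-1].decode("ascii")    # ...and picks _loads_v4 for "4"
print(version, rest)
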
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/wrapper.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/wrapper.py
deleted file mode 100644
index 265bfc8..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/cachecontrol/wrapper.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from .adapter import CacheControlAdapter
-from .cache import DictCache
-
-
-def CacheControl(
-    sess,
-    cache=None,
-    cache_etags=True,
-    serializer=None,
-    heuristic=None,
-    controller_class=None,
-    adapter_class=None,
-    cacheable_methods=None,
-):
-
-    cache = cache or DictCache()
-    adapter_class = adapter_class or CacheControlAdapter
-    adapter = adapter_class(
-        cache,
-        cache_etags=cache_etags,
-        serializer=serializer,
-        heuristic=heuristic,
-        controller_class=controller_class,
-        cacheable_methods=cacheable_methods,
-    )
-    sess.mount("http://", adapter)
-    sess.mount("https://", adapter)
-
-    return sess
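
`CacheControl()` above is just a convenience that mounts a `CacheControlAdapter` for both schemes on an existing session. Typical use looks like the following, again assuming the standalone `cachecontrol` and `requests` packages rather than pip's vendored copy:

import requests
from cachecontrol import CacheControl

# Wrap an ordinary requests session; the cache defaults to an in-memory DictCache.
sess = CacheControl(requests.Session(), cache_etags=True)

resp = sess.get("https://example.com/")   # first request goes out over the network
resp = sess.get("https://example.com/")   # may be served from cache or revalidated via ETag
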
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/certifi/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/certifi/__init__.py
deleted file mode 100644
index 8e358e4..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/certifi/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from .core import where
-
-__version__ = "2019.09.11"
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/certifi/__main__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/certifi/__main__.py
deleted file mode 100644
index ae2aff5..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/certifi/__main__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from pip._vendor.certifi import where
-print(where())
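
`certifi.where()` above simply returns the filesystem path of the bundled `cacert.pem` that follows. A short sketch of how it is commonly consumed (the `requests` call is illustrative; `verify=` accepts any CA-bundle path):

import certifi    # standalone package; pip's copy lives under pip._vendor.certifi
import requests

print(certifi.where())    # absolute path to the bundled cacert.pem

# Point an HTTPS client explicitly at that bundle.
resp = requests.get("https://example.com/", verify=certifi.where())
print(resp.status_code)
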
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/certifi/cacert.pem b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/certifi/cacert.pem
deleted file mode 100644
index 70fa91f..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/certifi/cacert.pem
+++ /dev/null
@@ -1,4558 +0,0 @@
-
-# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
-# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
-# Label: "GlobalSign Root CA"
-# Serial: 4835703278459707669005204
-# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
-# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
-# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
------BEGIN CERTIFICATE-----
-MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
-A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
-b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
-MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
-YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
-aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
-jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
-xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
-1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
-snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
-U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
-9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
-BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
-AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
-yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
-38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
-AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
-DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
-HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
-# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
-# Label: "GlobalSign Root CA - R2"
-# Serial: 4835703278459682885658125
-# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30
-# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe
-# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e
------BEGIN CERTIFICATE-----
-MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
-A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
-Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
-MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
-A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
-v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
-eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
-tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
-C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
-zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
-mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
-V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
-bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
-3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
-J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
-291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
-ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
-AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
-TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
-# Label: "Verisign Class 3 Public Primary Certification Authority - G3"
-# Serial: 206684696279472310254277870180966723415
-# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09
-# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6
-# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44
------BEGIN CERTIFICATE-----
-MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
-CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
-cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
-LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
-aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
-dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
-VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
-aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
-bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
-IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
-LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
-N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
-KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
-kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
-CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
-Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
-imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
-2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
-DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
-/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
-F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
-TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
-# Label: "Entrust.net Premium 2048 Secure Server CA"
-# Serial: 946069240
-# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90
-# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31
-# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77
------BEGIN CERTIFICATE-----
-MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
-RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
-bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
-IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
-ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3
-MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
-LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
-YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
-A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
-K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
-sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
-MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
-XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
-HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
-4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
-HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub
-j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo
-U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf
-zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b
-u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+
-bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er
-fF6adulZkMV8gzURZVE=
------END CERTIFICATE-----
-
-# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
-# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
-# Label: "Baltimore CyberTrust Root"
-# Serial: 33554617
-# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
-# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
-# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
------BEGIN CERTIFICATE-----
-MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
-RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
-VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
-DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
-ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
-VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
-mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
-IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
-mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
-XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
-dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
-jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
-BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
-DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
-9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
-jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
-Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
-ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
-R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
------END CERTIFICATE-----
-
-# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
-# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
-# Label: "AddTrust External Root"
-# Serial: 1
-# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f
-# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68
-# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2
------BEGIN CERTIFICATE-----
-MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU
-MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs
-IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290
-MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux
-FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h
-bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v
-dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt
-H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9
-uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX
-mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX
-a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN
-E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0
-WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD
-VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0
-Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU
-cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx
-IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN
-AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH
-YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5
-6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC
-Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX
-c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a
-mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
-# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
-# Label: "Entrust Root Certification Authority"
-# Serial: 1164660820
-# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
-# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
-# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
------BEGIN CERTIFICATE-----
-MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
-VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
-Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
-KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
-cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
-NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
-NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
-ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
-BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
-KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
-Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
-4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
-KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
-rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
-94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
-sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
-gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
-kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
-vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
-A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
-O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
-AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
-9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
-eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
-0vdXcDazv/wor3ElhVsT/h5/WrQ8
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.
-# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.
-# Label: "GeoTrust Global CA"
-# Serial: 144470
-# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5
-# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12
-# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a
------BEGIN CERTIFICATE-----
-MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
-MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
-YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
-EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
-R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
-9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
-fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
-iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
-1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
-bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
-MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
-ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
-uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
-Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
-tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
-PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
-hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
-5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.
-# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.
-# Label: "GeoTrust Universal CA"
-# Serial: 1
-# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48
-# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79
-# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12
------BEGIN CERTIFICATE-----
-MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW
-MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy
-c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE
-BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0
-IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV
-VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8
-cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT
-QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh
-F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v
-c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w
-mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd
-VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX
-teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ
-f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe
-Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+
-nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB
-/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY
-MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG
-9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc
-aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX
-IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn
-ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z
-uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN
-Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja
-QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW
-koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9
-ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt
-DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm
-bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
-# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
-# Label: "GeoTrust Universal CA 2"
-# Serial: 1
-# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7
-# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79
-# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b
------BEGIN CERTIFICATE-----
-MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW
-MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy
-c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD
-VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1
-c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
-AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81
-WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG
-FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq
-XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL
-se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb
-KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd
-IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73
-y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt
-hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc
-QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4
-Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV
-HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV
-HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ
-KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z
-dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ
-L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr
-Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo
-ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY
-T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz
-GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m
-1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV
-OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
-6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX
-QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
------END CERTIFICATE-----
-
-# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
-# Subject: CN=AAA Certificate Services O=Comodo CA Limited
-# Label: "Comodo AAA Services root"
-# Serial: 1
-# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
-# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
-# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
------BEGIN CERTIFICATE-----
-MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
-MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
-GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
-YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
-MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
-BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
-GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
-ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
-BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
-3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
-YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
-rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
-ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
-oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
-MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
-QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
-b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
-AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
-GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
-Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
-G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
-l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
-smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
-# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority
-# Label: "QuoVadis Root CA"
-# Serial: 985026699
-# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24
-# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9
-# SHA256 Fingerprint: a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73
------BEGIN CERTIFICATE-----
-MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC
-TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0
-aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0
-aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz
-MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw
-IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR
-dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG
-9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp
-li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D
-rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ
-WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug
-F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU
-xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC
-Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv
-dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw
-ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl
-IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh
-c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy
-ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh
-Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI
-KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T
-KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq
-y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p
-dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD
-VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL
-MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk
-fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8
-7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R
-cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y
-mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW
-xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK
-SnQ2+Q==
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited
-# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited
-# Label: "QuoVadis Root CA 2"
-# Serial: 1289
-# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b
-# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7
-# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86
------BEGIN CERTIFICATE-----
-MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
-GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
-b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV
-BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
-YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa
-GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg
-Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J
-WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB
-rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp
-+ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1
-ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i
-Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz
-PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og
-/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH
-oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI
-yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud
-EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2
-A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL
-MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT
-ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f
-BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn
-g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl
-fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K
-WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha
-B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc
-hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR
-TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD
-mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z
-ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y
-4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza
-8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited
-# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited
-# Label: "QuoVadis Root CA 3"
-# Serial: 1478
-# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf
-# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85
-# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35
------BEGIN CERTIFICATE-----
-MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
-GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
-b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV
-BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
-YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM
-V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB
-4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr
-H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd
-8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv
-vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT
-mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe
-btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc
-T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt
-WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ
-c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A
-4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD
-VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG
-CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0
-aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0
-aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu
-dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw
-czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G
-A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC
-TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg
-Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0
-7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem
-d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd
-+LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B
-4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN
-t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x
-DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57
-k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s
-zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j
-Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT
-mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
-4SVhM7JZG+Ju1zdXtg2pEto=
------END CERTIFICATE-----
-
-# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1
-# Subject: O=SECOM Trust.net OU=Security Communication RootCA1
-# Label: "Security Communication Root CA"
-# Serial: 0
-# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a
-# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7
-# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c
------BEGIN CERTIFICATE-----
-MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY
-MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t
-dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5
-WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD
-VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3
-DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8
-9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ
-DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9
-Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N
-QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ
-xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G
-A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T
-AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG
-kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr
-Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5
-Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU
-JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot
-RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==
------END CERTIFICATE-----
-
-# Issuer: CN=Sonera Class2 CA O=Sonera
-# Subject: CN=Sonera Class2 CA O=Sonera
-# Label: "Sonera Class 2 Root CA"
-# Serial: 29
-# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb
-# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27
-# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27
------BEGIN CERTIFICATE-----
-MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP
-MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx
-MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV
-BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o
-Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt
-5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s
-3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej
-vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu
-8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw
-DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG
-MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil
-zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/
-3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD
-FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6
-Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2
-ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M
------END CERTIFICATE-----
-
-# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
-# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
-# Label: "XRamp Global CA Root"
-# Serial: 107108908803651509692980124233745014957
-# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
-# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
-# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
------BEGIN CERTIFICATE-----
-MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
-gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
-MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
-UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
-NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
-dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
-dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
-dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
-38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
-KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
-DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
-qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
-JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
-PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
-BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
-jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
-eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
-ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
-vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
-qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
-IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
-i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
-O+7ETPTsJ3xCwnR8gooJybQDJbw=
------END CERTIFICATE-----
-
-# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
-# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
-# Label: "Go Daddy Class 2 CA"
-# Serial: 0
-# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
-# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
-# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
------BEGIN CERTIFICATE-----
-MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
-MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
-YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
-MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
-ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
-MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
-ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
-PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
-wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
-EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
-avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
-YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
-sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
-/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
-IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
-YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
-ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
-OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
-TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
-HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
-dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
-ReYNnyicsbkqWletNw+vHX/bvZ8=
------END CERTIFICATE-----
-
-# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
-# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
-# Label: "Starfield Class 2 CA"
-# Serial: 0
-# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
-# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
-# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
------BEGIN CERTIFICATE-----
-MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
-MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
-U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
-NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
-ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
-ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
-DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
-8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
-+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
-X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
-K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
-1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
-A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
-zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
-YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
-bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
-DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
-L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
-eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
-xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
-VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
-WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
------END CERTIFICATE-----
-
-# Issuer: O=Government Root Certification Authority
-# Subject: O=Government Root Certification Authority
-# Label: "Taiwan GRCA"
-# Serial: 42023070807708724159991140556527066870
-# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e
-# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9
-# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3
------BEGIN CERTIFICATE-----
-MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/
-MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj
-YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow
-PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp
-Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
-AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR
-IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q
-gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy
-yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts
-F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2
-jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx
-ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC
-VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK
-YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH
-EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN
-Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud
-DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE
-MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK
-UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ
-TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf
-qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK
-ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE
-JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7
-hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1
-EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm
-nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX
-udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz
-ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe
-LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl
-pYYsfPQS
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Assured ID Root CA"
-# Serial: 17154717934120587862167794914071425081
-# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
-# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
-# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
------BEGIN CERTIFICATE-----
-MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
-b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
-EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
-cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
-MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
-JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
-mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
-wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
-VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
-AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
-AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
-BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
-pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
-dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
-fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
-NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
-H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
-+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Global Root CA"
-# Serial: 10944719598952040374951832963794454346
-# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
-# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
-# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
------BEGIN CERTIFICATE-----
-MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
-QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
-MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
-b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
-9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
-CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
-nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
-43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
-T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
-gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
-BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
-TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
-DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
-hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
-06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
-PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
-YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
-CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert High Assurance EV Root CA"
-# Serial: 3553400076410547919724730734378100087
-# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
-# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
-# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
------BEGIN CERTIFICATE-----
-MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
-ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
-MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
-LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
-RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
-+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
-PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
-xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
-Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
-hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
-EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
-MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
-FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
-nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
-eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
-hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
-Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
-vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
-+OkuE6N36B9K
------END CERTIFICATE-----
-
-# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co.
-# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co.
-# Label: "DST Root CA X3"
-# Serial: 91299735575339953335919266965803778155
-# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5
-# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13
-# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39
------BEGIN CERTIFICATE-----
-MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/
-MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
-DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow
-PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD
-Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
-AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O
-rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq
-OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b
-xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw
-7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD
-aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV
-HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG
-SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69
-ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr
-AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz
-R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5
-JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo
-Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ
------END CERTIFICATE-----
-
-# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG
-# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG
-# Label: "SwissSign Gold CA - G2"
-# Serial: 13492815561806991280
-# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93
-# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61
-# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95
------BEGIN CERTIFICATE-----
-MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV
-BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln
-biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF
-MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT
-d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
-CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8
-76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+
-bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c
-6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE
-emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd
-MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt
-MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y
-MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y
-FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi
-aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM
-gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB
-qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7
-lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn
-8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov
-L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6
-45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO
-UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5
-O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC
-bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv
-GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a
-77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC
-hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3
-92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp
-Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w
-ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
-Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
------END CERTIFICATE-----
-
-# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG
-# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG
-# Label: "SwissSign Silver CA - G2"
-# Serial: 5700383053117599563
-# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13
-# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb
-# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5
------BEGIN CERTIFICATE-----
-MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE
-BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu
-IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow
-RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY
-U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A
-MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv
-Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br
-YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF
-nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH
-6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt
-eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/
-c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ
-MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH
-HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf
-jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6
-5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB
-rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
-F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c
-wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0
-cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB
-AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp
-WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9
-xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ
-2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ
-IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8
-aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X
-em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR
-dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/
-OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+
-hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy
-tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
-# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
-# Label: "GeoTrust Primary Certification Authority"
-# Serial: 32798226551256963324313806436981982369
-# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf
-# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96
-# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c
------BEGIN CERTIFICATE-----
-MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
-MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
-R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
-MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
-Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
-ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
-AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
-ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
-7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
-kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
-mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
-A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
-KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
-6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
-4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
-oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
-UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
-AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
------END CERTIFICATE-----
-
-# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
-# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
-# Label: "thawte Primary Root CA"
-# Serial: 69529181992039203566298953787712940909
-# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12
-# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81
-# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f
------BEGIN CERTIFICATE-----
-MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
-qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
-Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
-MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
-BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
-NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
-LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
-A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
-IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
-W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
-3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
-6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
-Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
-NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
-MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
-r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
-DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
-YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
-xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
-/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
-LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
-jVaMaA==
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
-# Label: "VeriSign Class 3 Public Primary Certification Authority - G5"
-# Serial: 33037644167568058970164719475676101450
-# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c
-# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5
-# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df
------BEGIN CERTIFICATE-----
-MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
-yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
-ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
-U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
-ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
-aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
-MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
-ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
-biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
-U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
-aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
-nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
-t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
-SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
-BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
-rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
-NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
-BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
-BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
-aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
-MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
-p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
-5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
-WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
-4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
-hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
------END CERTIFICATE-----
-
-# Issuer: CN=SecureTrust CA O=SecureTrust Corporation
-# Subject: CN=SecureTrust CA O=SecureTrust Corporation
-# Label: "SecureTrust CA"
-# Serial: 17199774589125277788362757014266862032
-# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1
-# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11
-# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73
------BEGIN CERTIFICATE-----
-MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI
-MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
-FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz
-MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv
-cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN
-AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz
-Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO
-0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao
-wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj
-7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS
-8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT
-BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB
-/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg
-JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC
-NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3
-6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/
-3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm
-D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS
-CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR
-3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE=
------END CERTIFICATE-----
-
-# Issuer: CN=Secure Global CA O=SecureTrust Corporation
-# Subject: CN=Secure Global CA O=SecureTrust Corporation
-# Label: "Secure Global CA"
-# Serial: 9751836167731051554232119481456978597
-# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de
-# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b
-# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69
------BEGIN CERTIFICATE-----
-MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK
-MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
-GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx
-MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg
-Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ
-iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa
-/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ
-jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI
-HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7
-sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w
-gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF
-MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw
-KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG
-AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L
-URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO
-H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm
-I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY
-iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc
-f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW
------END CERTIFICATE-----
-
-# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
-# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
-# Label: "COMODO Certification Authority"
-# Serial: 104350513648249232941998508985834464573
-# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
-# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
-# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
------BEGIN CERTIFICATE-----
-MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
-gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
-A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
-BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
-MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
-YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
-RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
-aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
-UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
-2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
-Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
-+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
-DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
-nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
-/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
-PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
-QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
-SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
-IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
-RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
-zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
-BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
-ZQ==
------END CERTIFICATE-----
-
-# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
-# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
-# Label: "Network Solutions Certificate Authority"
-# Serial: 116697915152937497490437556386812487904
-# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
-# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
-# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
------BEGIN CERTIFICATE-----
-MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
-MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
-MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
-dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
-UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
-ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
-c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
-OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
-mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
-BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
-qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
-gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
-BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
-bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
-dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
-6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
-h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
-/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
-wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
-pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
------END CERTIFICATE-----
-
-# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
-# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
-# Label: "COMODO ECC Certification Authority"
-# Serial: 41578283867086692638256921589707938090
-# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
-# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
-# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
------BEGIN CERTIFICATE-----
-MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
-MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
-BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
-IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
-MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
-ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
-T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
-biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
-FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
-cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
-BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
-BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
-fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
-GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
------END CERTIFICATE-----
-
-# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
-# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed
-# Label: "OISTE WISeKey Global Root GA CA"
-# Serial: 86718877871133159090080555911823548314
-# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93
-# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9
-# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5
------BEGIN CERTIFICATE-----
-MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB
-ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly
-aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl
-ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w
-NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G
-A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD
-VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX
-SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
-MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR
-VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2
-w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF
-mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg
-4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9
-4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw
-DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw
-EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx
-SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2
-ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8
-vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa
-hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi
-Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ
-/L7fCg0=
------END CERTIFICATE-----
-
-# Issuer: CN=Certigna O=Dhimyotis
-# Subject: CN=Certigna O=Dhimyotis
-# Label: "Certigna"
-# Serial: 18364802974209362175
-# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff
-# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97
-# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d
------BEGIN CERTIFICATE-----
-MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV
-BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X
-DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ
-BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3
-DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4
-QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny
-gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw
-zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q
-130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2
-JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw
-DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw
-ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT
-AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj
-AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG
-9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h
-bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc
-fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu
-HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w
-t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
-WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
------END CERTIFICATE-----
-
-# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
-# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
-# Label: "Cybertrust Global Root"
-# Serial: 4835703278459682877484360
-# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1
-# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6
-# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3
------BEGIN CERTIFICATE-----
-MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG
-A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh
-bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE
-ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS
-b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5
-7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS
-J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y
-HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP
-t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz
-FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY
-XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/
-MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw
-hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js
-MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA
-A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj
-Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx
-XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o
-omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc
-A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
-WL1WMRJOEcgh4LMRkWXbtKaIOM5V
------END CERTIFICATE-----
-
-# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
-# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
-# Label: "ePKI Root Certification Authority"
-# Serial: 28956088682735189655030529057352760477
-# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3
-# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0
-# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5
------BEGIN CERTIFICATE-----
-MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe
-MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
-ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
-Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw
-IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL
-SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF
-AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH
-SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh
-ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X
-DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1
-TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ
-fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA
-sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU
-WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS
-nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH
-dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip
-NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC
-AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF
-MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH
-ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB
-uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl
-PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP
-JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/
-gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2
-j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6
-5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB
-o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS
-/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z
-Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE
-W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
-hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
------END CERTIFICATE-----
-
-# Issuer: O=certSIGN OU=certSIGN ROOT CA
-# Subject: O=certSIGN OU=certSIGN ROOT CA
-# Label: "certSIGN ROOT CA"
-# Serial: 35210227249154
-# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17
-# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b
-# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb
------BEGIN CERTIFICATE-----
-MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT
-AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD
-QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP
-MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC
-ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do
-0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ
-UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d
-RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ
-OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv
-JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C
-AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O
-BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ
-LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY
-MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ
-44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I
-Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw
-i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN
-9u6wWk5JRFRYX0KD
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
-# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
-# Label: "GeoTrust Primary Certification Authority - G3"
-# Serial: 28809105769928564313984085209975885599
-# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05
-# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd
-# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4
------BEGIN CERTIFICATE-----
-MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
-mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
-MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
-eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
-cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
-BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
-MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
-BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
-LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
-+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
-hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
-5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
-JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
-DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
-huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
-HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
-AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
-zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
-kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
-AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
-SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
-spki4cErx5z481+oghLrGREt
------END CERTIFICATE-----
-
-# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
-# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
-# Label: "thawte Primary Root CA - G2"
-# Serial: 71758320672825410020661621085256472406
-# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f
-# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12
-# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57
------BEGIN CERTIFICATE-----
-MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
-MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
-IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
-BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
-MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
-d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
-YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
-dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
-BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
-papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
-BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
-DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
-KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
-XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
------END CERTIFICATE-----
-
-# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
-# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
-# Label: "thawte Primary Root CA - G3"
-# Serial: 127614157056681299805556476275995414779
-# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31
-# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2
-# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c
------BEGIN CERTIFICATE-----
-MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
-rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
-Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
-MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
-BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
-Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
-LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
-MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
-ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
-gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
-YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
-b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
-9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
-zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
-OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
-HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
-2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
-oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
-t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
-KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
-m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
-MdRAGmI0Nj81Aa6sY6A=
------END CERTIFICATE-----
-
-# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
-# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
-# Label: "GeoTrust Primary Certification Authority - G2"
-# Serial: 80682863203381065782177908751794619243
-# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a
-# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0
-# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66
------BEGIN CERTIFICATE-----
-MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
-MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
-KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
-MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
-eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
-BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
-NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
-BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
-MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
-So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
-tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
-BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
-CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
-qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
-rD6ogRLQy7rQkgu2npaqBA+K
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
-# Label: "VeriSign Universal Root Certification Authority"
-# Serial: 85209574734084581917763752644031726877
-# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19
-# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54
-# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c
------BEGIN CERTIFICATE-----
-MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
-vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
-ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp
-U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W
-ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
-Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX
-MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0
-IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y
-IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh
-bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF
-AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF
-9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH
-H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H
-LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN
-/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT
-rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud
-EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw
-WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs
-exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud
-DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4
-sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+
-seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz
-4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+
-BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR
-lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
-7M2CYfE45k+XmCpajQ==
------END CERTIFICATE-----
-
-# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
-# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
-# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"
-# Serial: 63143484348153506665311985501458640051
-# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41
-# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a
-# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79
------BEGIN CERTIFICATE-----
-MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
-MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
-ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln
-biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
-U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
-aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG
-A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp
-U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg
-SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln
-biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5
-IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm
-GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve
-fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw
-AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ
-aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj
-aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW
-kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
-4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga
-FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
------END CERTIFICATE-----
-
-# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
-# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
-# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny"
-# Serial: 80544274841616
-# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88
-# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91
-# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98
------BEGIN CERTIFICATE-----
-MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG
-EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3
-MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl
-cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR
-dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB
-pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM
-b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm
-aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz
-IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
-MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT
-lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz
-AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5
-VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG
-ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2
-BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG
-AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M
-U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh
-bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C
-+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC
-bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F
-uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
-XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
------END CERTIFICATE-----
-
-# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
-# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden
-# Label: "Staat der Nederlanden Root CA - G2"
-# Serial: 10000012
-# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a
-# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16
-# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f
------BEGIN CERTIFICATE-----
-MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
-TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
-dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX
-DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
-ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
-b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291
-qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp
-uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU
-Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE
-pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp
-5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M
-UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN
-GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy
-5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv
-6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK
-eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6
-B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/
-BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov
-L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV
-HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG
-SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS
-CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen
-5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897
-IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK
-gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL
-+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL
-vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm
-bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk
-N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC
-Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z
-ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ==
------END CERTIFICATE-----
-
-# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post
-# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post
-# Label: "Hongkong Post Root CA 1"
-# Serial: 1000
-# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca
-# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58
-# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2
------BEGIN CERTIFICATE-----
-MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx
-FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg
-Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG
-A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr
-b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ
-jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn
-PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh
-ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9
-nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h
-q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED
-MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC
-mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3
-7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB
-oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs
-EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO
-fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi
-AmvZWg==
------END CERTIFICATE-----
-
-# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
-# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
-# Label: "SecureSign RootCA11"
-# Serial: 1
-# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26
-# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3
-# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12
------BEGIN CERTIFICATE-----
-MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr
-MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG
-A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0
-MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp
-Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD
-QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz
-i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8
-h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV
-MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9
-UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni
-8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC
-h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD
-VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
-AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm
-KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ
-X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr
-QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5
-pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN
-QSdJQO7e5iNEOdyhIta6A/I=
------END CERTIFICATE-----
-
-# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
-# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
-# Label: "Microsec e-Szigno Root CA 2009"
-# Serial: 14014712776195784473
-# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1
-# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e
-# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78
------BEGIN CERTIFICATE-----
-MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
-VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0
-ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G
-CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y
-OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx
-FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp
-Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o
-dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP
-kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc
-cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U
-fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7
-N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC
-xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1
-+rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
-A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM
-Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG
-SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h
-mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk
-ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775
-tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c
-2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t
-HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
-# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
-# Label: "GlobalSign Root CA - R3"
-# Serial: 4835703278459759426209954
-# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
-# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
-# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
------BEGIN CERTIFICATE-----
-MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
-A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
-Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
-MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
-A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
-RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
-gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
-KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
-QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
-XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
-DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
-LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
-RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
-jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
-6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
-mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
-Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
-WD9f
------END CERTIFICATE-----
-
-# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
-# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
-# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
-# Serial: 6047274297262753887
-# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3
-# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa
-# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef
------BEGIN CERTIFICATE-----
-MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE
-BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
-cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy
-MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
-Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
-MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
-thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
-cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
-L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
-NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
-X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
-m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
-Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
-EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
-KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
-6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
-OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD
-VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD
-VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp
-cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv
-ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl
-AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF
-661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9
-am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1
-ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481
-PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS
-3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k
-SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF
-3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM
-ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g
-StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz
-Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB
-jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V
------END CERTIFICATE-----
-
-# Issuer: CN=Izenpe.com O=IZENPE S.A.
-# Subject: CN=Izenpe.com O=IZENPE S.A.
-# Label: "Izenpe.com"
-# Serial: 917563065490389241595536686991402621
-# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73
-# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19
-# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f
------BEGIN CERTIFICATE-----
-MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4
-MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6
-ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD
-VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j
-b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq
-scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO
-xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H
-LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX
-uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD
-yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+
-JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q
-rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN
-BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L
-hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB
-QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+
-HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu
-Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg
-QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB
-BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx
-MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
-AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA
-A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb
-laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56
-awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo
-JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw
-LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT
-VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk
-LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb
-UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/
-QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+
-naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls
-QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw==
------END CERTIFICATE-----
-
-# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
-# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A.
-# Label: "Chambers of Commerce Root - 2008"
-# Serial: 11806822484801597146
-# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7
-# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c
-# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0
------BEGIN CERTIFICATE-----
-MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD
-VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
-IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
-MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz
-IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz
-MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj
-dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw
-EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp
-MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G
-CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9
-28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq
-VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q
-DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR
-5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL
-ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a
-Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl
-UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s
-+12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5
-Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj
-ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx
-hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV
-HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1
-+HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN
-YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t
-L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy
-ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt
-IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV
-HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w
-DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW
-PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF
-5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1
-glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH
-FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2
-pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD
-xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG
-tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq
-jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De
-fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg
-OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ
-d0jQ
------END CERTIFICATE-----
-
-# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
-# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A.
-# Label: "Global Chambersign Root - 2008"
-# Serial: 14541511773111788494
-# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3
-# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c
-# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca
------BEGIN CERTIFICATE-----
-MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD
-VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
-IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
-MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD
-aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx
-MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy
-cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG
-A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl
-BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI
-hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed
-KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7
-G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2
-zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4
-ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG
-HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2
-Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V
-yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e
-beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r
-6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh
-wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog
-zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW
-BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr
-ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp
-ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk
-cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt
-YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC
-CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow
-KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI
-hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ
-UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz
-X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x
-fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz
-a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd
-Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd
-SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O
-AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso
-M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge
-v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z
-09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B
------END CERTIFICATE-----
-
-# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
-# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
-# Label: "Go Daddy Root Certificate Authority - G2"
-# Serial: 0
-# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
-# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
-# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
------BEGIN CERTIFICATE-----
-MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
-EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
-EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
-ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
-NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
-EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
-AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
-DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
-E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
-/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
-DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
-GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
-tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
-AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
-FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
-WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
-9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
-gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
-2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
-LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
-4uJEvlz36hz1
------END CERTIFICATE-----
-
-# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Label: "Starfield Root Certificate Authority - G2"
-# Serial: 0
-# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
-# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
-# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
------BEGIN CERTIFICATE-----
-MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
-EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
-HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
-ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
-MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
-b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
-aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
-Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
-ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
-nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
-HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
-Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
-dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
-HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
-BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
-CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
-sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
-4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
-8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
-pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
-mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
------END CERTIFICATE-----
-
-# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
-# Label: "Starfield Services Root Certificate Authority - G2"
-# Serial: 0
-# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
-# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
-# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
------BEGIN CERTIFICATE-----
-MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
-EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
-HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
-ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
-MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
-VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
-ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
-dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
-OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
-8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
-Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
-hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
-6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
-DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
-AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
-bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
-ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
-qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
-iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
-0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
-sSi6
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
-# Subject: CN=AffirmTrust Commercial O=AffirmTrust
-# Label: "AffirmTrust Commercial"
-# Serial: 8608355977964138876
-# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
-# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
-# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
------BEGIN CERTIFICATE-----
-MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
-BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
-dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
-MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
-cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
-Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
-ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
-MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
-yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
-VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
-nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
-KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
-XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
-vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
-Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
-N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
-nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Networking O=AffirmTrust
-# Subject: CN=AffirmTrust Networking O=AffirmTrust
-# Label: "AffirmTrust Networking"
-# Serial: 8957382827206547757
-# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
-# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
-# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
------BEGIN CERTIFICATE-----
-MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
-BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
-dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
-MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
-cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
-AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
-YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
-kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
-QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
-6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
-yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
-QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
-KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
-tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
-QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
-Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
-olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
-x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Premium O=AffirmTrust
-# Subject: CN=AffirmTrust Premium O=AffirmTrust
-# Label: "AffirmTrust Premium"
-# Serial: 7893706540734352110
-# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
-# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
-# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
------BEGIN CERTIFICATE-----
-MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
-BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
-dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
-A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
-cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
-qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
-JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
-+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
-s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
-HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
-70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
-V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
-qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
-5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
-C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
-OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
-FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
-BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
-KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
-Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
-8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
-MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
-0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
-u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
-u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
-YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
-GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
-RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
-KeC2uAloGRwYQw==
------END CERTIFICATE-----
-
-# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
-# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
-# Label: "AffirmTrust Premium ECC"
-# Serial: 8401224907861490260
-# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
-# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
-# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
------BEGIN CERTIFICATE-----
-MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
-VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
-cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
-BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
-VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
-0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
-ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
-A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
-A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
-aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
-flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
------END CERTIFICATE-----
-
-# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
-# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
-# Label: "Certum Trusted Network CA"
-# Serial: 279744
-# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78
-# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e
-# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e
------BEGIN CERTIFICATE-----
-MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM
-MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D
-ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU
-cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3
-WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg
-Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw
-IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B
-AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH
-UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM
-TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU
-BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM
-kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x
-AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV
-HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV
-HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y
-sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL
-I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8
-J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY
-VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
-03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
------END CERTIFICATE-----
-
-# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
-# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
-# Label: "TWCA Root Certification Authority"
-# Serial: 1
-# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79
-# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48
-# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44
------BEGIN CERTIFICATE-----
-MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES
-MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU
-V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz
-WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO
-LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm
-aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
-AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE
-AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH
-K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX
-RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z
-rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx
-3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
-HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq
-hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC
-MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls
-XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D
-lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn
-aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ
-YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==
------END CERTIFICATE-----
-
-# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
-# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
-# Label: "Security Communication RootCA2"
-# Serial: 0
-# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43
-# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74
-# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6
------BEGIN CERTIFICATE-----
-MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
-MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
-U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
-DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
-dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
-YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
-OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
-zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
-VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
-hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
-ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
-awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
-OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
-DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
-coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
-okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
-t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
-1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
-SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
------END CERTIFICATE-----
-
-# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
-# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority
-# Label: "Hellenic Academic and Research Institutions RootCA 2011"
-# Serial: 0
-# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9
-# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d
-# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71
------BEGIN CERTIFICATE-----
-MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix
-RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
-dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p
-YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw
-NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK
-EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl
-cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
-c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB
-BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz
-dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ
-fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns
-bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD
-75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP
-FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV
-HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp
-5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu
-b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA
-A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p
-6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8
-TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7
-dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys
-Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI
-l7WdmplNsDz4SgCbZN2fOUvRJ9e4
------END CERTIFICATE-----
-
-# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
-# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
-# Label: "Actalis Authentication Root CA"
-# Serial: 6271844772424770508
-# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6
-# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac
-# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66
------BEGIN CERTIFICATE-----
-MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
-BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
-MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
-IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
-SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
-ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
-MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
-UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
-4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
-KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
-gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
-rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
-51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
-be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
-KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
-v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
-fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
-jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
-ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
-ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
-e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
-jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
-WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
-SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
-pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
-X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
-fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
-K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
-ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
-LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
-LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
------END CERTIFICATE-----
-
-# Issuer: O=Trustis Limited OU=Trustis FPS Root CA
-# Subject: O=Trustis Limited OU=Trustis FPS Root CA
-# Label: "Trustis FPS Root CA"
-# Serial: 36053640375399034304724988975563710553
-# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d
-# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04
-# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d
------BEGIN CERTIFICATE-----
-MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF
-MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL
-ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx
-MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc
-MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD
-ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+
-AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH
-iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj
-vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA
-0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB
-OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/
-BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E
-FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01
-GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW
-zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4
-1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE
-f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F
-jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN
-ZetX2fNXlrtIzYE=
------END CERTIFICATE-----
-
-# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
-# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
-# Label: "Buypass Class 2 Root CA"
-# Serial: 2
-# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29
-# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99
-# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48
------BEGIN CERTIFICATE-----
-MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
-MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
-Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow
-TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
-HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
-BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr
-6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV
-L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91
-1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx
-MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ
-QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB
-arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr
-Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi
-FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS
-P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN
-9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP
-AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz
-uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h
-9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s
-A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t
-OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo
-+fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7
-KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2
-DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us
-H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ
-I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7
-5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h
-3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz
-Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=
------END CERTIFICATE-----
-
-# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
-# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
-# Label: "Buypass Class 3 Root CA"
-# Serial: 2
-# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec
-# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57
-# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d
------BEGIN CERTIFICATE-----
-MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
-MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
-Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow
-TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
-HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
-BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y
-ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E
-N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9
-tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX
-0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c
-/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X
-KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY
-zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS
-O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D
-34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP
-K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3
-AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv
-Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj
-QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV
-cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS
-IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
-HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa
-O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv
-033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u
-dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE
-kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41
-3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD
-u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq
-4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=
------END CERTIFICATE-----
-
-# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
-# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
-# Label: "T-TeleSec GlobalRoot Class 3"
-# Serial: 1
-# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef
-# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1
-# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd
------BEGIN CERTIFICATE-----
-MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
-KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
-BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
-YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1
-OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
-aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
-ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G
-CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN
-8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/
-RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4
-hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5
-ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM
-EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj
-QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1
-A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy
-WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ
-1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30
-6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT
-91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml
-e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
-TpPDpFQUWw==
------END CERTIFICATE-----
-
-# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
-# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus
-# Label: "EE Certification Centre Root CA"
-# Serial: 112324828676200291871926431888494945866
-# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f
-# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7
-# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76
------BEGIN CERTIFICATE-----
-MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1
-MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1
-czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG
-CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy
-MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl
-ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS
-b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB
-AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy
-euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO
-bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw
-WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d
-MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE
-1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD
-VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/
-zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB
-BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF
-BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV
-v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG
-E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u
-uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW
-iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v
-GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0=
------END CERTIFICATE-----
-
-# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
-# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
-# Label: "D-TRUST Root Class 3 CA 2 2009"
-# Serial: 623603
-# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f
-# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0
-# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1
------BEGIN CERTIFICATE-----
-MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF
-MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD
-bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha
-ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM
-HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB
-BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03
-UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42
-tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R
-ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM
-lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp
-/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G
-A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G
-A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj
-dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy
-MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl
-cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js
-L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL
-BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni
-acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0
-o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K
-zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8
-PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y
-Johw1+qRzT65ysCQblrGXnRl11z+o+I=
------END CERTIFICATE-----
-
-# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
-# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
-# Label: "D-TRUST Root Class 3 CA 2 EV 2009"
-# Serial: 623604
-# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6
-# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83
-# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81
------BEGIN CERTIFICATE-----
-MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF
-MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD
-bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw
-NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV
-BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn
-ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0
-3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z
-qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR
-p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8
-HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw
-ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
-HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
-Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
-c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
-RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
-dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
-Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
-3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
-nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
-CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
-xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
-KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
------END CERTIFICATE-----
-
-# Issuer: CN=CA Disig Root R2 O=Disig a.s.
-# Subject: CN=CA Disig Root R2 O=Disig a.s.
-# Label: "CA Disig Root R2"
-# Serial: 10572350602393338211
-# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
-# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
-# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
------BEGIN CERTIFICATE-----
-MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
-BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
-MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
-MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
-EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
-ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
-NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
-PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
-x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
-QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
-yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
-QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
-H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
-QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
-i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
-nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
-rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
-DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
-hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
-tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
-GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
-lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
-+elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
-TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
-nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
-gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
-G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
-zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
-L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
------END CERTIFICATE-----
-
-# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
-# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
-# Label: "ACCVRAIZ1"
-# Serial: 6828503384748696800
-# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
-# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
-# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
------BEGIN CERTIFICATE-----
-MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
-AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
-CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
-BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
-VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
-qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
-HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
-G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
-lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
-IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
-0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
-k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
-4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
-m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
-cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
-uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
-KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
-ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
-AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
-VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
-VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
-CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
-cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
-QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
-7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
-cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
-QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
-czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
-aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
-aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
-DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
-BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
-D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
-JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
-AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
-vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
-tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
-7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
-I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
-h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
-d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
-pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
------END CERTIFICATE-----
-
-# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
-# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
-# Label: "TWCA Global Root CA"
-# Serial: 3262
-# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
-# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
-# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
------BEGIN CERTIFICATE-----
-MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
-EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
-VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5
-NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT
-B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF
-10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz
-0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh
-MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH
-zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc
-46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2
-yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi
-laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP
-oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA
-BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE
-qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm
-4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
-/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL
-1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn
-LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF
-H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo
-RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+
-nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh
-15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW
-6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW
-nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j
-wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz
-aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy
-KwbQBM0=
------END CERTIFICATE-----
-
-# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera
-# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera
-# Label: "TeliaSonera Root CA v1"
-# Serial: 199041966741090107964904287217786801558
-# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c
-# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37
-# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89
------BEGIN CERTIFICATE-----
-MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw
-NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv
-b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD
-VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2
-MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F
-VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1
-7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X
-Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+
-/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs
-81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm
-dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe
-Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu
-sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4
-pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs
-slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ
-arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD
-VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG
-9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl
-dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx
-0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj
-TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed
-Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7
-Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI
-OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7
-vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW
-t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn
-HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
-SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
------END CERTIFICATE-----
-
-# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi
-# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi
-# Label: "E-Tugra Certification Authority"
-# Serial: 7667447206703254355
-# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49
-# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39
-# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c
------BEGIN CERTIFICATE-----
-MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV
-BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC
-aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV
-BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1
-Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz
-MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+
-BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp
-em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN
-ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5
-MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY
-B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH
-D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF
-Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo
-q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D
-k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH
-fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut
-dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM
-ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8
-zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn
-rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX
-U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6
-Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5
-XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF
-Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR
-HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY
-GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c
-77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3
-+GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK
-vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6
-FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl
-yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P
-AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD
-y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d
-NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA==
------END CERTIFICATE-----
-
-# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
-# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
-# Label: "T-TeleSec GlobalRoot Class 2"
-# Serial: 1
-# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a
-# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9
-# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52
------BEGIN CERTIFICATE-----
-MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
-KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
-BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
-YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1
-OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
-aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
-ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G
-CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd
-AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC
-FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi
-1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq
-jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ
-wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj
-QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/
-WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy
-NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC
-uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw
-IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6
-g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN
-9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP
-BSeOE6Fuwg==
------END CERTIFICATE-----
-
-# Issuer: CN=Atos TrustedRoot 2011 O=Atos
-# Subject: CN=Atos TrustedRoot 2011 O=Atos
-# Label: "Atos TrustedRoot 2011"
-# Serial: 6643877497813316402
-# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56
-# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21
-# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74
------BEGIN CERTIFICATE-----
-MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE
-AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG
-EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM
-FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC
-REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp
-Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM
-VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+
-SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ
-4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
-cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
-eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
-HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
-A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
-DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
-vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
-DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
-maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
-lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
-KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
-# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
-# Label: "QuoVadis Root CA 1 G3"
-# Serial: 687049649626669250736271037606554624078720034195
-# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
-# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
-# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
------BEGIN CERTIFICATE-----
-MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
-BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
-BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
-MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
-aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
-wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
-rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
-68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
-4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
-UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
-abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
-3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
-KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
-hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
-Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
-zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
-BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
-ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
-MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
-cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
-qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
-YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
-b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
-8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
-NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
-ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
-q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
-nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
-# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
-# Label: "QuoVadis Root CA 2 G3"
-# Serial: 390156079458959257446133169266079962026824725800
-# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
-# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
-# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
------BEGIN CERTIFICATE-----
-MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
-BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
-BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
-MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
-aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
-qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
-n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
-c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
-O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
-o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
-IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
-IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
-8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
-vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
-7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
-cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
-BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
-ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
-AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
-roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
-W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
-lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
-+V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
-csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
-dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
-KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
-HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
-WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
-# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
-# Label: "QuoVadis Root CA 3 G3"
-# Serial: 268090761170461462463995952157327242137089239581
-# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
-# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
-# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
------BEGIN CERTIFICATE-----
-MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
-BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
-BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
-MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
-aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
-/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
-FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
-U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
-ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
-FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
-A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
-eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
-sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
-VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
-A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+
-ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
-BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD
-ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px
-KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI
-FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv
-oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg
-u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP
-0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf
-3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl
-8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+
-DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN
-PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/
-ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Assured ID Root G2"
-# Serial: 15385348160840213938643033620894905419
-# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d
-# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f
-# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85
------BEGIN CERTIFICATE-----
-MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
-b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG
-EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
-cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi
-MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA
-n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc
-biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp
-EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA
-bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu
-YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB
-AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW
-BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI
-QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I
-0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni
-lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9
-B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv
-ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo
-IhNzbM8m9Yop5w==
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Assured ID Root G3"
-# Serial: 15459312981008553731928384953135426796
-# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb
-# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89
-# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2
------BEGIN CERTIFICATE-----
-MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw
-CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
-ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg
-RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV
-UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
-Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq
-hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf
-Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q
-RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
-BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD
-AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY
-JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv
-6pZjamVFkpUBtA==
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Global Root G2"
-# Serial: 4293743540046975378534879503202253541
-# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44
-# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4
-# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f
------BEGIN CERTIFICATE-----
-MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
-MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
-MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
-b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
-9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
-2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
-1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
-q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
-tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
-vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
-BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
-5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
-1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
-NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
-Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
-8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
-pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
-MrY=
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Global Root G3"
-# Serial: 7089244469030293291760083333884364146
-# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca
-# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e
-# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0
------BEGIN CERTIFICATE-----
-MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw
-CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
-ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe
-Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw
-EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x
-IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF
-K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG
-fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO
-Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd
-BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx
-AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/
-oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8
-sycX
------END CERTIFICATE-----
-
-# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
-# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
-# Label: "DigiCert Trusted Root G4"
-# Serial: 7451500558977370777930084869016614236
-# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49
-# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4
-# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88
------BEGIN CERTIFICATE-----
-MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
-RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV
-UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
-Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y
-ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If
-xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV
-ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO
-DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ
-jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/
-CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi
-EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM
-fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY
-uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK
-chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t
-9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
-hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD
-ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2
-SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd
-+SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc
-fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa
-sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N
-cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N
-0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie
-4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI
-r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
-/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm
-gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
------END CERTIFICATE-----
-
-# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited
-# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited
-# Label: "COMODO RSA Certification Authority"
-# Serial: 101909084537582093308941363524873193117
-# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18
-# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4
-# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34
------BEGIN CERTIFICATE-----
-MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB
-hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
-A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV
-BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5
-MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT
-EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR
-Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh
-dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR
-6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X
-pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC
-9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV
-/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf
-Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z
-+pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w
-qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah
-SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC
-u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf
-Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq
-crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E
-FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB
-/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl
-wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM
-4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV
-2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna
-FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ
-CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK
-boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke
-jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL
-S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb
-QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl
-0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB
-NVOFBkpdn627G190
------END CERTIFICATE-----
-
-# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
-# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
-# Label: "USERTrust RSA Certification Authority"
-# Serial: 2645093764781058787591871645665788717
-# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5
-# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e
-# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2
------BEGIN CERTIFICATE-----
-MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB
-iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
-cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
-BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw
-MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV
-BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU
-aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy
-dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
-AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B
-3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY
-tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/
-Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2
-VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT
-79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6
-c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT
-Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l
-c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee
-UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE
-Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd
-BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G
-A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF
-Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO
-VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3
-ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs
-8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR
-iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze
-Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ
-XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/
-qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB
-VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB
-L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG
-jjxDah2nGN59PRbxYvnKkKj9
------END CERTIFICATE-----
-
-# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
-# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
-# Label: "USERTrust ECC Certification Authority"
-# Serial: 123013823720199481456569720443997572134
-# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1
-# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0
-# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a
------BEGIN CERTIFICATE-----
-MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL
-MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl
-eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT
-JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx
-MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT
-Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg
-VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm
-aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo
-I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng
-o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G
-A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD
-VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB
-zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW
-RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg=
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
-# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
-# Label: "GlobalSign ECC Root CA - R4"
-# Serial: 14367148294922964480859022125800977897474
-# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e
-# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb
-# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c
------BEGIN CERTIFICATE-----
-MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk
-MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH
-bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
-DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
-QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
-MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ
-FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw
-DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F
-uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX
-kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs
-ewv4n4Q=
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
-# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
-# Label: "GlobalSign ECC Root CA - R5"
-# Serial: 32785792099990507226680698011560947931244
-# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08
-# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa
-# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24
------BEGIN CERTIFICATE-----
-MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk
-MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH
-bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
-DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
-QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
-MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc
-8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke
-hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
-VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI
-KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg
-515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO
-xwy8p2Fp8fc74SrL+SvzZpA3
------END CERTIFICATE-----
-
-# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
-# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden
-# Label: "Staat der Nederlanden Root CA - G3"
-# Serial: 10003001
-# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37
-# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc
-# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28
------BEGIN CERTIFICATE-----
-MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
-TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
-dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX
-DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
-ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
-b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP
-cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW
-IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX
-xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy
-KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR
-9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az
-5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8
-6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7
-Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP
-bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt
-BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt
-XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF
-MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd
-INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD
-U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp
-LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8
-Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp
-gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh
-/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw
-0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A
-fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq
-4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR
-1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/
-QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM
-94B7IWcnMFk=
------END CERTIFICATE-----
-
-# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
-# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden
-# Label: "Staat der Nederlanden EV Root CA"
-# Serial: 10000013
-# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba
-# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb
-# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a
------BEGIN CERTIFICATE-----
-MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO
-TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh
-dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y
-MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg
-TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS
-b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS
-M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC
-UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d
-Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p
-rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l
-pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb
-j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC
-KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS
-/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X
-cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH
-1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP
-px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB
-/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7
-MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI
-eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u
-2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS
-v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC
-wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy
-CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e
-vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6
-Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa
-Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL
-eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8
-FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc
-7uzXLg==
------END CERTIFICATE-----
-
-# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
-# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
-# Label: "IdenTrust Commercial Root CA 1"
-# Serial: 13298821034946342390520003877796839426
-# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7
-# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25
-# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae
------BEGIN CERTIFICATE-----
-MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK
-MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu
-VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw
-MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw
-JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT
-3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU
-+ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp
-S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1
-bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi
-T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL
-vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK
-Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK
-dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT
-c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv
-l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N
-iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
-/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD
-ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH
-6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt
-LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93
-nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3
-+wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK
-W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT
-AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq
-l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG
-4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ
-mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A
-7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H
------END CERTIFICATE-----
-
-# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
-# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
-# Label: "IdenTrust Public Sector Root CA 1"
-# Serial: 13298821034946342390521976156843933698
-# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba
-# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd
-# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f
------BEGIN CERTIFICATE-----
-MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN
-MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu
-VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN
-MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
-MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
-MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
-ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
-RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
-bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
-/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
-3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
-EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
-9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
-GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
-2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
-WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
-W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
-BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
-AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
-t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
-DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
-TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
-lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
-mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
-WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
-+bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
-tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
-GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
-8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
-# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
-# Label: "Entrust Root Certification Authority - G2"
-# Serial: 1246989352
-# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
-# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
-# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
------BEGIN CERTIFICATE-----
-MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
-VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
-cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
-IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
-dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
-NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
-dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
-dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
-aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
-YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
-AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
-RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
-cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
-wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
-U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
-jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
-BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
-BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
-jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
-Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
-1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
-nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
-VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
-# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
-# Label: "Entrust Root Certification Authority - EC1"
-# Serial: 51543124481930649114116133369
-# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
-# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
-# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
------BEGIN CERTIFICATE-----
-MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
-A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
-d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
-dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
-RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
-MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
-VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
-L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
-Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
-ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
-A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
-ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
-Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
-BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
-R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
-hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
------END CERTIFICATE-----
-
-# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
-# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
-# Label: "CFCA EV ROOT"
-# Serial: 407555286
-# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
-# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
-# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
------BEGIN CERTIFICATE-----
-MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
-TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
-aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
-MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
-aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
-T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
-sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
-TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
-/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
-7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
-EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
-hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
-a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
-aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
-TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
-PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
-cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
-tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
-BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
-ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
-ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
-jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
-ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
-P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19
-xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d
-Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN
-5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe
-/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z
-AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
-5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
------END CERTIFICATE-----
-
-# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
-# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
-# Label: "OISTE WISeKey Global Root GB CA"
-# Serial: 157768595616588414422159278966750757568
-# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d
-# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed
-# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6
------BEGIN CERTIFICATE-----
-MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt
-MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg
-Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i
-YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x
-CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG
-b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh
-bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3
-HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx
-WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX
-1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk
-u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P
-99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r
-M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw
-AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB
-BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh
-cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5
-gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO
-ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf
-aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic
-Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM=
------END CERTIFICATE-----
-
-# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
-# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
-# Label: "SZAFIR ROOT CA2"
-# Serial: 357043034767186914217277344587386743377558296292
-# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99
-# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de
-# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe
------BEGIN CERTIFICATE-----
-MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL
-BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6
-ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw
-NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L
-cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg
-Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN
-QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT
-3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw
-3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6
-3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5
-BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN
-XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
-AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF
-AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw
-8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG
-nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP
-oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy
-d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg
-LvWpCz/UXeHPhJ/iGcJfitYgHuNztw==
------END CERTIFICATE-----
-
-# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
-# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
-# Label: "Certum Trusted Network CA 2"
-# Serial: 44979900017204383099463764357512596969
-# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2
-# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92
-# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04
------BEGIN CERTIFICATE-----
-MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB
-gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu
-QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG
-A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz
-OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ
-VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp
-ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3
-b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA
-DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn
-0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB
-OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE
-fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E
-Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m
-o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i
-sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW
-OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez
-Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS
-adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n
-3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
-AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC
-AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ
-F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf
-CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29
-XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm
-djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/
-WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb
-AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq
-P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko
-b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj
-XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P
-5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi
-DrW5viSP
------END CERTIFICATE-----
-
-# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
-# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
-# Label: "Hellenic Academic and Research Institutions RootCA 2015"
-# Serial: 0
-# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce
-# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6
-# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36
------BEGIN CERTIFICATE-----
-MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix
-DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k
-IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT
-N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v
-dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG
-A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh
-ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx
-QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
-dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
-AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA
-4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0
-AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10
-4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C
-ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV
-9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD
-gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6
-Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq
-NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko
-LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc
-Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV
-HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd
-ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I
-XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI
-M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot
-9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V
-Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea
-j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh
-X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ
-l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf
-bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4
-pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK
-e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0
-vm9qp/UsQu0yrbYhnr68
------END CERTIFICATE-----
-
-# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
-# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
-# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015"
-# Serial: 0
-# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef
-# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66
-# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33
------BEGIN CERTIFICATE-----
-MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN
-BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
-c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl
-bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv
-b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ
-BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj
-YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5
-MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0
-dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg
-QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa
-jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC
-MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi
-C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep
-lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof
-TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR
------END CERTIFICATE-----
-
-# Issuer: CN=ISRG Root X1 O=Internet Security Research Group
-# Subject: CN=ISRG Root X1 O=Internet Security Research Group
-# Label: "ISRG Root X1"
-# Serial: 172886928669790476064670243504169061120
-# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e
-# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8
-# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6
------BEGIN CERTIFICATE-----
-MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
-TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
-cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
-WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
-ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
-MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
-h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
-0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
-A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
-T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
-B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
-B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
-KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
-OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
-jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
-qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
-rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
-HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
-hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
-ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
-3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
-NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
-ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
-TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
-jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
-oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
-4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
-mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
-emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
------END CERTIFICATE-----
-
-# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
-# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
-# Label: "AC RAIZ FNMT-RCM"
-# Serial: 485876308206448804701554682760554759
-# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d
-# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20
-# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa
------BEGIN CERTIFICATE-----
-MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx
-CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ
-WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ
-BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG
-Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/
-yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf
-BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz
-WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF
-tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z
-374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC
-IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL
-mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7
-wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS
-MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2
-ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet
-UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw
-AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H
-YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3
-LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD
-nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1
-RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM
-LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf
-77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N
-JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm
-fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp
-6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp
-1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B
-9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok
-RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv
-uu8wd+RU4riEmViAqhOLUTpPSPaLtrM=
------END CERTIFICATE-----
-
-# Issuer: CN=Amazon Root CA 1 O=Amazon
-# Subject: CN=Amazon Root CA 1 O=Amazon
-# Label: "Amazon Root CA 1"
-# Serial: 143266978916655856878034712317230054538369994
-# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6
-# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16
-# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e
------BEGIN CERTIFICATE-----
-MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF
-ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
-b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL
-MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
-b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj
-ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM
-9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw
-IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6
-VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L
-93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm
-jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
-AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA
-A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI
-U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs
-N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv
-o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU
-5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy
-rqXRfboQnoZsG4q5WTP468SQvvG5
------END CERTIFICATE-----
-
-# Issuer: CN=Amazon Root CA 2 O=Amazon
-# Subject: CN=Amazon Root CA 2 O=Amazon
-# Label: "Amazon Root CA 2"
-# Serial: 143266982885963551818349160658925006970653239
-# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66
-# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a
-# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4
------BEGIN CERTIFICATE-----
-MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF
-ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
-b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL
-MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
-b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK
-gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ
-W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg
-1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K
-8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r
-2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me
-z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR
-8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj
-mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz
-7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6
-+XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI
-0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
-Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm
-UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2
-LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY
-+gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS
-k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl
-7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm
-btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl
-urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+
-fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63
-n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE
-76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H
-9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT
-4PsJYGw=
------END CERTIFICATE-----
-
-# Issuer: CN=Amazon Root CA 3 O=Amazon
-# Subject: CN=Amazon Root CA 3 O=Amazon
-# Label: "Amazon Root CA 3"
-# Serial: 143266986699090766294700635381230934788665930
-# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87
-# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e
-# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4
------BEGIN CERTIFICATE-----
-MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5
-MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
-Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
-A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
-Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl
-ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j
-QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr
-ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr
-BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM
-YyRIHN8wfdVoOw==
------END CERTIFICATE-----
-
-# Issuer: CN=Amazon Root CA 4 O=Amazon
-# Subject: CN=Amazon Root CA 4 O=Amazon
-# Label: "Amazon Root CA 4"
-# Serial: 143266989758080763974105200630763877849284878
-# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd
-# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be
-# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92
------BEGIN CERTIFICATE-----
-MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5
-MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
-Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
-A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
-Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi
-9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk
-M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB
-/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB
-MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw
-CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW
-1KyLa2tJElMzrdfkviT8tQp21KW8EA==
------END CERTIFICATE-----
-
-# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A.
-# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A.
-# Label: "LuxTrust Global Root 2"
-# Serial: 59914338225734147123941058376788110305822489521
-# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c
-# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f
-# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5
------BEGIN CERTIFICATE-----
-MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL
-BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV
-BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw
-MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B
-LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN
-AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F
-ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem
-hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1
-EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn
-Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4
-zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ
-96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m
-j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g
-DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+
-8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j
-X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH
-hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB
-KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0
-Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT
-+Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL
-BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9
-BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO
-jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9
-loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c
-qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+
-2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/
-JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre
-zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf
-LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+
-x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6
-oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr
------END CERTIFICATE-----
-
-# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
-# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
-# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1"
-# Serial: 1
-# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49
-# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca
-# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16
------BEGIN CERTIFICATE-----
-MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx
-GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp
-bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w
-KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0
-BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy
-dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG
-EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll
-IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU
-QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT
-TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg
-LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7
-a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr
-LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr
-N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X
-YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/
-iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f
-AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH
-V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
-BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh
-AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf
-IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4
-lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c
-8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf
-lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM=
------END CERTIFICATE-----
-
-# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
-# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
-# Label: "GDCA TrustAUTH R5 ROOT"
-# Serial: 9009899650740120186
-# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4
-# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4
-# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93
------BEGIN CERTIFICATE-----
-MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE
-BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ
-IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0
-MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV
-BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w
-HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF
-AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj
-Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj
-TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u
-KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj
-qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm
-MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12
-ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP
-zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk
-L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC
-jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA
-HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC
-AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB
-/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg
-p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm
-DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5
-COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry
-L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf
-JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg
-IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io
-2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV
-09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ
-XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq
-T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe
-MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g==
------END CERTIFICATE-----
-
-# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
-# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
-# Label: "TrustCor RootCert CA-1"
-# Serial: 15752444095811006489
-# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45
-# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a
-# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c
------BEGIN CERTIFICATE-----
-MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD
-VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
-MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
-cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y
-IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB
-pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h
-IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG
-A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU
-cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
-CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid
-RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V
-seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme
-9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV
-EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW
-hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/
-DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw
-DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD
-ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I
-/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf
-ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ
-yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts
-L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN
-zl/HHk484IkzlQsPpTLWPFp5LBk=
------END CERTIFICATE-----
-
-# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
-# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
-# Label: "TrustCor RootCert CA-2"
-# Serial: 2711694510199101698
-# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64
-# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0
-# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65
------BEGIN CERTIFICATE-----
-MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV
-BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
-IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
-dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig
-Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk
-MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg
-Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD
-VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy
-dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
-AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+
-QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq
-1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp
-2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK
-DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape
-az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF
-3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88
-oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM
-g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3
-mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh
-8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd
-BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U
-nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw
-DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX
-dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+
-MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL
-/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX
-CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa
-ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW
-2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7
-N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3
-Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB
-As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp
-5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu
-1uwJ
------END CERTIFICATE-----
-
-# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
-# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority
-# Label: "TrustCor ECA-1"
-# Serial: 9548242946988625984
-# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c
-# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd
-# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c
------BEGIN CERTIFICATE-----
-MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD
-VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk
-MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U
-cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y
-IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV
-BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw
-IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy
-dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig
-RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb
-3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA
-BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5
-3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou
-owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/
-wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF
-ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf
-BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/
-MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv
-civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2
-AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F
-hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50
-soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI
-WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi
-tJ/X5g==
------END CERTIFICATE-----
-
-# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
-# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
-# Label: "SSL.com Root Certification Authority RSA"
-# Serial: 8875640296558310041
-# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29
-# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb
-# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69
------BEGIN CERTIFICATE-----
-MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE
-BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK
-DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp
-Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz
-OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
-dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv
-bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN
-AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R
-xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX
-qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC
-C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3
-6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh
-/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF
-YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E
-JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc
-US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8
-ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm
-+Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi
-M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV
-HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G
-A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV
-cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc
-Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs
-PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/
-q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0
-cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr
-a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I
-H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y
-K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu
-nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf
-oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY
-Ic2wBlX7Jz9TkHCpBB5XJ7k=
------END CERTIFICATE-----
-
-# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
-# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
-# Label: "SSL.com Root Certification Authority ECC"
-# Serial: 8495723813297216424
-# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e
-# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a
-# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65
------BEGIN CERTIFICATE-----
-MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC
-VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
-U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0
-aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz
-WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0
-b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS
-b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB
-BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI
-7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg
-CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud
-EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD
-VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T
-kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+
-gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl
------END CERTIFICATE-----
-
-# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
-# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
-# Label: "SSL.com EV Root Certification Authority RSA R2"
-# Serial: 6248227494352943350
-# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95
-# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a
-# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c
------BEGIN CERTIFICATE-----
-MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV
-BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE
-CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy
-dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy
-MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G
-A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD
-DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy
-MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq
-M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf
-OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa
-4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9
-HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR
-aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA
-b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ
-Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV
-PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO
-pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu
-UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY
-MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV
-HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4
-9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW
-s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5
-Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg
-cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM
-79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz
-/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt
-ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm
-Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK
-QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ
-w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi
-S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07
-mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w==
------END CERTIFICATE-----
-
-# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
-# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
-# Label: "SSL.com EV Root Certification Authority ECC"
-# Serial: 3182246526754555285
-# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90
-# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d
-# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8
------BEGIN CERTIFICATE-----
-MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC
-VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
-U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp
-Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx
-NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
-dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv
-bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49
-AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA
-VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku
-WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP
-MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX
-5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ
-ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg
-h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg==
------END CERTIFICATE-----
-
-# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
-# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
-# Label: "GlobalSign Root CA - R6"
-# Serial: 1417766617973444989252670301619537
-# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae
-# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1
-# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69
------BEGIN CERTIFICATE-----
-MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg
-MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh
-bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx
-MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET
-MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ
-KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI
-xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k
-ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD
-aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw
-LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw
-1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX
-k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2
-SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h
-bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n
-WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY
-rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce
-MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD
-AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu
-bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN
-nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt
-Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61
-55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj
-vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf
-cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz
-oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp
-nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs
-pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v
-JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R
-8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4
-5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA=
------END CERTIFICATE-----
-
-# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
-# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
-# Label: "OISTE WISeKey Global Root GC CA"
-# Serial: 44084345621038548146064804565436152554
-# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23
-# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31
-# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d
------BEGIN CERTIFICATE-----
-MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw
-CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91
-bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg
-Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ
-BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu
-ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS
-b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni
-eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W
-p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E
-BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T
-rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV
-57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg
-Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9
------END CERTIFICATE-----
-
-# Issuer: CN=GTS Root R1 O=Google Trust Services LLC
-# Subject: CN=GTS Root R1 O=Google Trust Services LLC
-# Label: "GTS Root R1"
-# Serial: 146587175971765017618439757810265552097
-# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85
-# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8
-# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72
------BEGIN CERTIFICATE-----
-MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH
-MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM
-QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy
-MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl
-cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB
-AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM
-f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX
-mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7
-zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P
-fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc
-vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4
-Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp
-zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO
-Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW
-k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+
-DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF
-lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
-HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW
-Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1
-d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z
-XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR
-gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3
-d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv
-J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg
-DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM
-+SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy
-F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9
-SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws
-E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl
------END CERTIFICATE-----
-
-# Issuer: CN=GTS Root R2 O=Google Trust Services LLC
-# Subject: CN=GTS Root R2 O=Google Trust Services LLC
-# Label: "GTS Root R2"
-# Serial: 146587176055767053814479386953112547951
-# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b
-# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d
-# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60
------BEGIN CERTIFICATE-----
-MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH
-MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM
-QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy
-MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl
-cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB
-AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv
-CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg
-GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu
-XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd
-re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu
-PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1
-mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K
-8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj
-x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR
-nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0
-kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok
-twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
-HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp
-8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT
-vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT
-z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA
-pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb
-pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB
-R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R
-RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk
-0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC
-5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF
-izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn
-yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC
------END CERTIFICATE-----
-
-# Issuer: CN=GTS Root R3 O=Google Trust Services LLC
-# Subject: CN=GTS Root R3 O=Google Trust Services LLC
-# Label: "GTS Root R3"
-# Serial: 146587176140553309517047991083707763997
-# MD5 Fingerprint: 1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25
-# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5
-# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5
------BEGIN CERTIFICATE-----
-MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw
-CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
-MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
-MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
-Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA
-IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout
-736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A
-DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
-DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk
-fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA
-njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd
------END CERTIFICATE-----
-
-# Issuer: CN=GTS Root R4 O=Google Trust Services LLC
-# Subject: CN=GTS Root R4 O=Google Trust Services LLC
-# Label: "GTS Root R4"
-# Serial: 146587176229350439916519468929765261721
-# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26
-# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb
-# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd
------BEGIN CERTIFICATE-----
-MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw
-CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
-MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
-MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
-Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA
-IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu
-hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l
-xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
-DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0
-CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx
-sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w==
------END CERTIFICATE-----
-
-# Issuer: CN=UCA Global G2 Root O=UniTrust
-# Subject: CN=UCA Global G2 Root O=UniTrust
-# Label: "UCA Global G2 Root"
-# Serial: 124779693093741543919145257850076631279
-# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8
-# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a
-# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c
------BEGIN CERTIFICATE-----
-MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9
-MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH
-bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x
-CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds
-b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr
-b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9
-kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm
-VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R
-VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc
-C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj
-tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY
-D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv
-j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl
-NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6
-iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP
-O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/
-BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV
-ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj
-L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5
-1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl
-1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU
-b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV
-PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj
-y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb
-EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg
-DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI
-+Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy
-YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX
-UB+K+wb1whnw0A==
------END CERTIFICATE-----
-
-# Issuer: CN=UCA Extended Validation Root O=UniTrust
-# Subject: CN=UCA Extended Validation Root O=UniTrust
-# Label: "UCA Extended Validation Root"
-# Serial: 106100277556486529736699587978573607008
-# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2
-# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a
-# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24
------BEGIN CERTIFICATE-----
-MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH
-MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF
-eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx
-MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV
-BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB
-AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog
-D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS
-sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop
-O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk
-sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi
-c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj
-VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz
-KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/
-TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G
-sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs
-1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD
-fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T
-AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN
-l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR
-ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ
-VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5
-c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp
-4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s
-t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj
-2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO
-vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C
-xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx
-cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM
-fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax
------END CERTIFICATE-----
-
-# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
-# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
-# Label: "Certigna Root CA"
-# Serial: 269714418870597844693661054334862075617
-# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77
-# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43
-# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68
------BEGIN CERTIFICATE-----
-MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw
-WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw
-MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x
-MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD
-VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX
-BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw
-ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO
-ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M
-CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu
-I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm
-TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh
-C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf
-ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz
-IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT
-Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k
-JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5
-hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB
-GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
-FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of
-1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov
-L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo
-dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr
-aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq
-hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L
-6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG
-HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6
-0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB
-lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi
-o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1
-gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v
-faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63
-Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh
-jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw
-3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0=
------END CERTIFICATE-----
-
-# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
-# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
-# Label: "emSign Root CA - G1"
-# Serial: 235931866688319308814040
-# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac
-# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c
-# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67
------BEGIN CERTIFICATE-----
-MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD
-VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU
-ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH
-MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO
-MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv
-Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN
-BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz
-f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO
-8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq
-d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM
-tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt
-Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB
-o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD
-AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x
-PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM
-wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d
-GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH
-6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby
-RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx
-iN66zB+Afko=
------END CERTIFICATE-----
-
-# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
-# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
-# Label: "emSign ECC Root CA - G3"
-# Serial: 287880440101571086945156
-# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40
-# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1
-# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b
------BEGIN CERTIFICATE-----
-MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG
-EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo
-bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g
-RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ
-TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s
-b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw
-djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0
-WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS
-fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB
-zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq
-hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB
-CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD
-+JbNR6iC8hZVdyR+EhCVBCyj
------END CERTIFICATE-----
-
-# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
-# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
-# Label: "emSign Root CA - C1"
-# Serial: 825510296613316004955058
-# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68
-# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01
-# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f
------BEGIN CERTIFICATE-----
-MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG
-A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg
-SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw
-MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
-biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v
-dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ
-BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ
-HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH
-3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH
-GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c
-xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1
-aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq
-TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
-BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87
-/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4
-kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG
-YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT
-+xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo
-WXzhriKi4gp6D/piq1JM4fHfyr6DDUI=
------END CERTIFICATE-----
-
-# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
-# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
-# Label: "emSign ECC Root CA - C3"
-# Serial: 582948710642506000014504
-# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5
-# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66
-# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3
------BEGIN CERTIFICATE-----
-MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG
-EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx
-IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw
-MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
-biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND
-IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci
-MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti
-sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O
-BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB
-Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c
-3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J
-0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ==
------END CERTIFICATE-----
-
-# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post
-# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post
-# Label: "Hongkong Post Root CA 3"
-# Serial: 46170865288971385588281144162979347873371282084
-# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0
-# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02
-# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6
------BEGIN CERTIFICATE-----
-MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL
-BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ
-SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n
-a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5
-NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT
-CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u
-Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
-AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO
-dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI
-VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV
-9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY
-2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY
-vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt
-bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb
-x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+
-l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK
-TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj
-Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP
-BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e
-i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw
-DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG
-7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk
-MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr
-gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk
-GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS
-3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm
-Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+
-l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c
-JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP
-L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa
-LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG
-mpv0
------END CERTIFICATE-----
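The hunks above delete pip's vendored certifi CA bundle (cacert.pem), in which every root certificate is preceded by its MD5/SHA1/SHA256 fingerprints. Those fingerprint comments are simply hashes of the DER-encoded certificate, so they can be re-derived from the PEM block alone. A minimal sketch using only the standard library; the helper name sha256_fingerprint is illustrative and not part of certifi:

    import hashlib
    import ssl

    def sha256_fingerprint(pem_cert: str) -> str:
        """Return the colon-separated SHA256 fingerprint of one PEM certificate."""
        der = ssl.PEM_cert_to_DER_cert(pem_cert)   # strip the BEGIN/END armor and base64-decode
        digest = hashlib.sha256(der).hexdigest()
        return ":".join(digest[i:i + 2] for i in range(0, len(digest), 2))

Feeding it any single BEGIN/END CERTIFICATE block from the bundle should reproduce the matching "# SHA256 Fingerprint:" comment above that block.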
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/certifi/core.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/certifi/core.py
deleted file mode 100644
index 7271acf..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/certifi/core.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-certifi.py
-~~~~~~~~~~
-
-This module returns the installation location of cacert.pem.
-"""
-import os
-
-
-def where():
-    f = os.path.dirname(__file__)
-
-    return os.path.join(f, 'cacert.pem')
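The deleted certifi/core.py exposes a single helper, where(), which returns the on-disk path of the cacert.pem bundle removed above. A short usage sketch of that API; it targets the standalone certifi package rather than the vendored copy at pip._vendor.certifi, and the SSL context is only illustrative:

    import ssl

    import certifi

    # where() resolves to the cacert.pem file that sits next to core.py
    print(certifi.where())

    # The usual reason to call it: hand the bundle to an SSL context as the CA file
    ctx = ssl.create_default_context(cafile=certifi.where())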
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/__init__.py
deleted file mode 100644
index 0f9f820..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/__init__.py
+++ /dev/null
@@ -1,39 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-
-from .compat import PY2, PY3
-from .universaldetector import UniversalDetector
-from .version import __version__, VERSION
-
-
-def detect(byte_str):
-    """
-    Detect the encoding of the given byte string.
-
-    :param byte_str:     The byte sequence to examine.
-    :type byte_str:      ``bytes`` or ``bytearray``
-    """
-    if not isinstance(byte_str, bytearray):
-        if not isinstance(byte_str, bytes):
-            raise TypeError('Expected object of type bytes or bytearray, got: '
-                            '{0}'.format(type(byte_str)))
-        else:
-            byte_str = bytearray(byte_str)
-    detector = UniversalDetector()
-    detector.feed(byte_str)
-    return detector.close()
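The deleted chardet/__init__.py wraps UniversalDetector in the one-shot detect() helper: it checks that the input is bytes or bytearray, feeds it once, and returns the detector's verdict. A usage sketch against the standalone chardet package (the vendored copy at pip._vendor.chardet exposes the same entry point); the Big5 sample is arbitrary, and very short inputs may come back with low confidence or no encoding at all:

    import chardet

    sample = "歐洲聯盟是一個政治及經濟聯盟".encode("big5")   # arbitrary Big5-encoded bytes
    result = chardet.detect(sample)        # builds a UniversalDetector, feeds once, closes it
    print(result)                          # dict with 'encoding', 'confidence' and 'language' keys
    # Passing str instead of bytes raises TypeError, per the isinstance check in detect() above.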
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/big5freq.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/big5freq.py
deleted file mode 100644
index 38f3251..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/big5freq.py
+++ /dev/null
@@ -1,386 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# Big5 frequency table
-# by Taiwan's Mandarin Promotion Council
-#
-#
-# 128  --> 0.42261
-# 256  --> 0.57851
-# 512  --> 0.74851
-# 1024 --> 0.89384
-# 2048 --> 0.97583
-#
-# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98
-# Random Distribution Ration = 512/(5401-512)=0.105
-#
-# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR
-
-BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75
-
-#Char to FreqOrder table
-BIG5_TABLE_SIZE = 5376
-
-BIG5_CHAR_TO_FREQ_ORDER = (
-   1,1801,1506, 255,1431, 198,   9,  82,   6,5008, 177, 202,3681,1256,2821, 110, #   16
-3814,  33,3274, 261,  76,  44,2114,  16,2946,2187,1176, 659,3971,  26,3451,2653, #   32
-1198,3972,3350,4202, 410,2215, 302, 590, 361,1964,   8, 204,  58,4510,5009,1932, #   48
-  63,5010,5011, 317,1614,  75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, #   64
-3682,   3,  10,3973,1471,  29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, #   80
-4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947,  34,3556,3204,  64, 604, #   96
-5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337,  72, 406,5017,  80, #  112
- 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449,  69,2987, 591, #  128
- 179,2096, 471, 115,2035,1844,  60,  50,2988, 134, 806,1869, 734,2036,3454, 180, #  144
- 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, #  160
-2502,  90,2716,1338, 663,  11, 906,1099,2553,  20,2441, 182, 532,1716,5019, 732, #  176
-1376,4204,1311,1420,3206,  25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, #  192
-3276, 475,1447,3683,5020, 117,  21, 656, 810,1297,2300,2334,3557,5021, 126,4205, #  208
- 706, 456, 150, 613,4513,  71,1118,2037,4206, 145,3092,  85, 835, 486,2115,1246, #  224
-1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, #  240
-3558,3135,5023,1956,1153,4207,  83, 296,1199,3093, 192, 624,  93,5024, 822,1898, #  256
-2823,3136, 795,2065, 991,1554,1542,1592,  27,  43,2867, 859, 139,1456, 860,4514, #  272
- 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, #  288
-3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, #  304
-1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, #  320
-5026,5027,2176,3207,3685,2682, 593, 845,1062,3277,  88,1723,2038,3978,1951, 212, #  336
- 266, 152, 149, 468,1899,4208,4516,  77, 187,5028,3038,  37,   5,2990,5029,3979, #  352
-5030,5031,  39,2524,4517,2908,3208,2079,  55, 148,  74,4518, 545, 483,1474,1029, #  368
-1665, 217,1870,1531,3138,1104,2655,4209,  24, 172,3562, 900,3980,3563,3564,4519, #  384
-  32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683,   4,3039,3351,1427,1789, #  400
- 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, #  416
-3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439,  38,5037,1063,5038, 794, #  432
-3982,1435,2301,  46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804,  35, 707, #  448
- 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, #  464
-2129,1363,3689,1423, 697, 100,3094,  48,  70,1231, 495,3139,2196,5043,1294,5044, #  480
-2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, #  496
- 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, #  512
- 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, #  528
-3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, #  544
-1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, #  560
-1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, #  576
-1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381,   7, #  592
-2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, #  608
- 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, #  624
-4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, #  640
-1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, #  656
-5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, #  672
-2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, #  688
- 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, #  704
-  98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, #  720
- 523,2789,2790,2658,5061, 141,2235,1333,  68, 176, 441, 876, 907,4220, 603,2602, #  736
- 710, 171,3464, 404, 549,  18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, #  752
-5063,2991, 368,5064, 146, 366,  99, 871,3693,1543, 748, 807,1586,1185,  22,2263, #  768
- 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, #  784
-1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068,  59,5069, #  800
- 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, #  816
- 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, #  832
-5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, #  848
-1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, #  864
- 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, #  880
-3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, #  896
-4224,  57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, #  912
-3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, #  928
- 279,3145,  51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, #  944
- 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, #  960
-1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, #  976
-4227,2475,1436, 953,4228,2055,4545, 671,2400,  79,4229,2446,3285, 608, 567,2689, #  992
-3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008
-3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024
-2402,5097,5098,5099,4232,3045,   0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040
-5101, 233,4233,3697,1819,4550,4551,5102,  96,1777,1315,2083,5103, 257,5104,1810, # 1056
-3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072
-5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088
-1484,5110,1712, 127,  67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104
-2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120
-1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136
-  78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152
-1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168
-4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184
-3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200
- 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216
- 165, 243,4559,3703,2528, 123, 683,4239, 764,4560,  36,3998,1793, 589,2916, 816, # 1232
- 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248
-2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264
-5122, 611,1156, 854,2386,1316,2875,   2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280
-1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296
-2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312
-1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328
-1994,5135,4564,5136,5137,2198,  13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344
-5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360
-5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376
-5149, 128,2133,  92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392
-3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408
-4567,2252,  94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424
-4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440
-2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456
-5163,2337,2068,  23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472
-3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488
- 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504
-5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863,  41, # 1520
-5170,5171,4575,5172,1657,2338,  19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536
-1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552
-2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568
-3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584
-4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600
-5182,2692, 733,  40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616
-3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632
-4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648
-1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664
-1871,2762,3004,5187, 435,5188, 343,1108, 596,  17,1751,4579,2239,3477,3709,5189, # 1680
-4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696
-1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712
- 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728
-1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744
-1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760
-3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776
- 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792
-5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808
-2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824
-1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840
-1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551,  30,2268,4266, # 1856
-5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872
- 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888
-4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904
- 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920
-2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936
- 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952
-1041,3005, 293,1168,  87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968
-1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984
- 730,1515, 184,2840,  66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000
-4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016
-4021,5231,5232,1186,  15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032
-1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048
-3596,1342,1681,1718, 766,3297, 286,  89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064
-5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080
-5240,3298, 310, 313,3482,2304, 770,4278,  54,3054, 189,4611,3105,3848,4025,5241, # 2096
-1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112
-2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128
-1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144
-3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160
-2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176
-3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192
-2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208
-4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224
-4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240
-3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256
-  97,  81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272
-3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288
- 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304
-3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320
-4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336
-3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352
-1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368
-5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384
- 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400
-5286, 587,  14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416
-1702,1226, 102,1547,  62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432
- 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448
-4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294,  86,1494,1730, # 2464
-4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480
- 397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496
-2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512
-2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885,  28,2695, # 2528
-3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544
-1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560
-4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576
-2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592
-1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608
-1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624
-2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640
-3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656
-1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672
-5313,3493,5314,5315,5316,3310,2698,1433,3311, 131,  95,1504,4049, 723,4303,3166, # 2688
-1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704
-4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654,  53,5320,3014,5321, # 2720
-1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736
- 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752
-1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768
-4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784
-4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800
-2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816
-1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832
-4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848
- 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864
-5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880
-2322,3316,5346,5347,4308,5348,4309,  84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896
-3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912
-4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928
- 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944
-5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960
-5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976
-1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992
-4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008
-4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024
-2699,1516,3614,1121,1082,1329,3317,4073,1449,3873,  65,1128,2848,2927,2769,1590, # 3040
-3874,5370,5371,  12,2668,  45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056
-3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072
-2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088
-1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104
-4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120
-3736,1859,  91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136
-3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152
-2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168
-4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771,  61,4079,3738,1823,4080, # 3184
-5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200
-3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216
-2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232
-3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248
-1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264
-2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280
-3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296
-4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063,  56,1396,3113, # 3312
-2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328
-2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344
-5418,1076,  49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360
-1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376
-2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392
-1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408
-3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424
-4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629,  31,2851, # 3440
-2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456
-3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472
-3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488
-2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504
-4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520
-2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536
-3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552
-4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568
-5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584
-3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600
- 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616
-1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412,  42,3119, 464,5455,2642, # 3632
-4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648
-1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664
-4701,5462,3020, 962, 588,3629, 289,3250,2644,1116,  52,5463,3067,1797,5464,5465, # 3680
-5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696
- 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712
-5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728
-5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744
-2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760
-3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776
-2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792
-2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808
- 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824
-1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840
-4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856
-3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872
-3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888
- 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904
-2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920
- 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936
-2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952
-4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968
-1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984
-4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000
-1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016
-3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032
- 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048
-3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064
-5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080
-5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096
-3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112
-3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128
-1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144
-2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160
-5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176
-1561,2674,1452,4113,1375,5549,5550,  47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192
-1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208
-3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224
- 919,2352,2975,2353,1270,4727,4115,  73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240
-1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256
-4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272
-5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288
-2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304
-3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320
- 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336
-1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352
-2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368
-2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384
-5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400
-5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416
-5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432
-2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448
-2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464
-1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480
-4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496
-3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512
-3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528
-4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544
-4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560
-2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576
-2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592
-5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608
-4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624
-5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640
-4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656
- 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672
- 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688
-1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704
-3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720
-4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736
-1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752
-5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768
-2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784
-2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800
-3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816
-5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832
-1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848
-3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864
-5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880
-1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896
-5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912
-2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928
-3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944
-2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960
-3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976
-3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992
-3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008
-4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024
- 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040
-2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056
-4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072
-3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088
-5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104
-1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120
-5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136
- 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152
-1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168
- 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184
-4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200
-1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216
-4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232
-1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248
- 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264
-3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280
-4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296
-5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312
- 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328
-3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344
- 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360
-2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376
-)
-
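The header comment of the deleted big5freq.py (at the top of the table above) quotes three numbers: the 512 most frequent of 5401 Big5 characters cover about 74.851% of typical text, the "ideal" distribution ratio is 0.74851 / (1 - 0.74851) ≈ 2.98, and a uniform distribution over the same split would give only 512 / (5401 - 512) ≈ 0.105. BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 is then roughly 25% of the ideal value, which is the safety margin the comment alludes to. A quick check of that arithmetic:

    coverage_top_512 = 0.74851                  # share of text covered by the 512 most frequent chars
    ideal_ratio = coverage_top_512 / (1 - coverage_top_512)
    random_ratio = 512 / (5401 - 512)           # same split if characters were uniformly distributed
    print(round(ideal_ratio, 2), round(random_ratio, 3), round(0.75 / ideal_ratio, 2))
    # -> 2.98 0.105 0.25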
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/big5prober.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/big5prober.py
deleted file mode 100644
index 98f9970..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/big5prober.py
+++ /dev/null
@@ -1,47 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
-from .chardistribution import Big5DistributionAnalysis
-from .mbcssm import BIG5_SM_MODEL
-
-
-class Big5Prober(MultiByteCharSetProber):
-    def __init__(self):
-        super(Big5Prober, self).__init__()
-        self.coding_sm = CodingStateMachine(BIG5_SM_MODEL)
-        self.distribution_analyzer = Big5DistributionAnalysis()
-        self.reset()
-
-    @property
-    def charset_name(self):
-        return "Big5"
-
-    @property
-    def language(self):
-        return "Chinese"
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/chardistribution.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/chardistribution.py
deleted file mode 100644
index c0395f4..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/chardistribution.py
+++ /dev/null
@@ -1,233 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE,
-                        EUCTW_TYPICAL_DISTRIBUTION_RATIO)
-from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE,
-                        EUCKR_TYPICAL_DISTRIBUTION_RATIO)
-from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE,
-                         GB2312_TYPICAL_DISTRIBUTION_RATIO)
-from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE,
-                       BIG5_TYPICAL_DISTRIBUTION_RATIO)
-from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE,
-                      JIS_TYPICAL_DISTRIBUTION_RATIO)
-
-
-class CharDistributionAnalysis(object):
-    ENOUGH_DATA_THRESHOLD = 1024
-    SURE_YES = 0.99
-    SURE_NO = 0.01
-    MINIMUM_DATA_THRESHOLD = 3
-
-    def __init__(self):
-        # Mapping table to get frequency order from char order (get from
-        # GetOrder())
-        self._char_to_freq_order = None
-        self._table_size = None  # Size of above table
-        # This is a constant value which varies from language to language,
-        # used in calculating confidence.  See
-        # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
-        # for further detail.
-        self.typical_distribution_ratio = None
-        self._done = None
-        self._total_chars = None
-        self._freq_chars = None
-        self.reset()
-
-    def reset(self):
-        """reset analyser, clear any state"""
-        # If this flag is set to True, detection is done and conclusion has
-        # been made
-        self._done = False
-        self._total_chars = 0  # Total characters encountered
-        # The number of characters whose frequency order is less than 512
-        self._freq_chars = 0
-
-    def feed(self, char, char_len):
-        """feed a character with known length"""
-        if char_len == 2:
-            # we only care about 2-byte characters in our distribution analysis
-            order = self.get_order(char)
-        else:
-            order = -1
-        if order >= 0:
-            self._total_chars += 1
-            # order is valid
-            if order < self._table_size:
-                if 512 > self._char_to_freq_order[order]:
-                    self._freq_chars += 1
-
-    def get_confidence(self):
-        """return confidence based on existing data"""
-        # if we didn't receive any characters in our consideration range,
-        # return a negative answer
-        if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD:
-            return self.SURE_NO
-
-        if self._total_chars != self._freq_chars:
-            r = (self._freq_chars / ((self._total_chars - self._freq_chars)
-                 * self.typical_distribution_ratio))
-            if r < self.SURE_YES:
-                return r
-
-        # normalize confidence (we don't want to be 100% sure)
-        return self.SURE_YES
-
-    def got_enough_data(self):
-        # It is not necessary to receive all of the data to draw a conclusion.
-        # For charset detection, a certain amount of data is enough.
-        return self._total_chars > self.ENOUGH_DATA_THRESHOLD
-
-    def get_order(self, byte_str):
-        # We do not handle characters based on the original encoding string,
-        # but convert this encoding string to a number, here called order.
-        # This allows multiple encodings of a language to share one frequency
-        # table.
-        return -1
-
-
-class EUCTWDistributionAnalysis(CharDistributionAnalysis):
-    def __init__(self):
-        super(EUCTWDistributionAnalysis, self).__init__()
-        self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER
-        self._table_size = EUCTW_TABLE_SIZE
-        self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO
-
-    def get_order(self, byte_str):
-        # for euc-TW encoding, we are interested
-        #   first  byte range: 0xc4 -- 0xfe
-        #   second byte range: 0xa1 -- 0xfe
-        # no validation needed here. State machine has done that
-        first_char = byte_str[0]
-        if first_char >= 0xC4:
-            return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1
-        else:
-            return -1
-
-
-class EUCKRDistributionAnalysis(CharDistributionAnalysis):
-    def __init__(self):
-        super(EUCKRDistributionAnalysis, self).__init__()
-        self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER
-        self._table_size = EUCKR_TABLE_SIZE
-        self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO
-
-    def get_order(self, byte_str):
-        # for euc-KR encoding, we are interested
-        #   first  byte range: 0xb0 -- 0xfe
-        #   second byte range: 0xa1 -- 0xfe
-        # no validation needed here. State machine has done that
-        first_char = byte_str[0]
-        if first_char >= 0xB0:
-            return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1
-        else:
-            return -1
-
-
-class GB2312DistributionAnalysis(CharDistributionAnalysis):
-    def __init__(self):
-        super(GB2312DistributionAnalysis, self).__init__()
-        self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER
-        self._table_size = GB2312_TABLE_SIZE
-        self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO
-
-    def get_order(self, byte_str):
-        # for GB2312 encoding, we are interested
-        #  first  byte range: 0xb0 -- 0xfe
-        #  second byte range: 0xa1 -- 0xfe
-        # no validation needed here. State machine has done that
-        first_char, second_char = byte_str[0], byte_str[1]
-        if (first_char >= 0xB0) and (second_char >= 0xA1):
-            return 94 * (first_char - 0xB0) + second_char - 0xA1
-        else:
-            return -1
-
-
-class Big5DistributionAnalysis(CharDistributionAnalysis):
-    def __init__(self):
-        super(Big5DistributionAnalysis, self).__init__()
-        self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER
-        self._table_size = BIG5_TABLE_SIZE
-        self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO
-
-    def get_order(self, byte_str):
-        # for big5 encoding, we are interested
-        #   first  byte range: 0xa4 -- 0xfe
-        #   second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe
-        # no validation needed here. State machine has done that
-        first_char, second_char = byte_str[0], byte_str[1]
-        if first_char >= 0xA4:
-            if second_char >= 0xA1:
-                return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63
-            else:
-                return 157 * (first_char - 0xA4) + second_char - 0x40
-        else:
-            return -1
-
-
-class SJISDistributionAnalysis(CharDistributionAnalysis):
-    def __init__(self):
-        super(SJISDistributionAnalysis, self).__init__()
-        self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
-        self._table_size = JIS_TABLE_SIZE
-        self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO
-
-    def get_order(self, byte_str):
-        # for sjis encoding, we are interested
-        #   first  byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe
-    #   second byte range: 0x40 -- 0x7e,  0x81 -- 0xfe
-        # no validation needed here. State machine has done that
-        first_char, second_char = byte_str[0], byte_str[1]
-        if (first_char >= 0x81) and (first_char <= 0x9F):
-            order = 188 * (first_char - 0x81)
-        elif (first_char >= 0xE0) and (first_char <= 0xEF):
-            order = 188 * (first_char - 0xE0 + 31)
-        else:
-            return -1
-        order = order + second_char - 0x40
-        if second_char > 0x7F:
-            order = -1
-        return order
-
-
-class EUCJPDistributionAnalysis(CharDistributionAnalysis):
-    def __init__(self):
-        super(EUCJPDistributionAnalysis, self).__init__()
-        self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
-        self._table_size = JIS_TABLE_SIZE
-        self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO
-
-    def get_order(self, byte_str):
-        # for euc-JP encoding, we are interested
-        #   first  byte range: 0xa0 -- 0xfe
-        #   second byte range: 0xa1 -- 0xfe
-        # no validation needed here. State machine has done that
-        char = byte_str[0]
-        if char >= 0xA0:
-            return 94 * (char - 0xA1) + byte_str[1] - 0xa1
-        else:
-            return -1
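The confidence formula in `CharDistributionAnalysis.get_confidence()` compares the share of observed high-frequency characters against the language's typical distribution ratio. A hedged restatement outside the class, with made-up counts and an illustrative ratio (the function and argument names below are not part of chardet's API):

```python
# Mirrors the ratio computed in the deleted get_confidence(); the counts
# passed in at the bottom are hypothetical.
def distribution_confidence(freq_chars, total_chars, typical_ratio,
                            sure_yes=0.99, sure_no=0.01, minimum=3):
    """freq_chars: characters whose frequency order is below 512."""
    if total_chars <= 0 or freq_chars <= minimum:
        return sure_no
    if total_chars != freq_chars:
        r = freq_chars / ((total_chars - freq_chars) * typical_ratio)
        if r < sure_yes:
            return r
    return sure_yes

print(distribution_confidence(180, 200, typical_ratio=0.75))  # 0.99 (capped)
print(distribution_confidence(40, 200, typical_ratio=0.75))   # ~0.333
```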
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/charsetgroupprober.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/charsetgroupprober.py
deleted file mode 100644
index 8b3738e..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/charsetgroupprober.py
+++ /dev/null
@@ -1,106 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .enums import ProbingState
-from .charsetprober import CharSetProber
-
-
-class CharSetGroupProber(CharSetProber):
-    def __init__(self, lang_filter=None):
-        super(CharSetGroupProber, self).__init__(lang_filter=lang_filter)
-        self._active_num = 0
-        self.probers = []
-        self._best_guess_prober = None
-
-    def reset(self):
-        super(CharSetGroupProber, self).reset()
-        self._active_num = 0
-        for prober in self.probers:
-            if prober:
-                prober.reset()
-                prober.active = True
-                self._active_num += 1
-        self._best_guess_prober = None
-
-    @property
-    def charset_name(self):
-        if not self._best_guess_prober:
-            self.get_confidence()
-            if not self._best_guess_prober:
-                return None
-        return self._best_guess_prober.charset_name
-
-    @property
-    def language(self):
-        if not self._best_guess_prober:
-            self.get_confidence()
-            if not self._best_guess_prober:
-                return None
-        return self._best_guess_prober.language
-
-    def feed(self, byte_str):
-        for prober in self.probers:
-            if not prober:
-                continue
-            if not prober.active:
-                continue
-            state = prober.feed(byte_str)
-            if not state:
-                continue
-            if state == ProbingState.FOUND_IT:
-                self._best_guess_prober = prober
-                return self.state
-            elif state == ProbingState.NOT_ME:
-                prober.active = False
-                self._active_num -= 1
-                if self._active_num <= 0:
-                    self._state = ProbingState.NOT_ME
-                    return self.state
-        return self.state
-
-    def get_confidence(self):
-        state = self.state
-        if state == ProbingState.FOUND_IT:
-            return 0.99
-        elif state == ProbingState.NOT_ME:
-            return 0.01
-        best_conf = 0.0
-        self._best_guess_prober = None
-        for prober in self.probers:
-            if not prober:
-                continue
-            if not prober.active:
-                self.logger.debug('%s not active', prober.charset_name)
-                continue
-            conf = prober.get_confidence()
-            self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, conf)
-            if best_conf < conf:
-                best_conf = conf
-                self._best_guess_prober = prober
-        if not self._best_guess_prober:
-            return 0.0
-        return best_conf
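`CharSetGroupProber.get_confidence()` is essentially a max over its still-active children. A stripped-down sketch of that selection logic, using a hypothetical `FakeProber` stand-in:

```python
class FakeProber:
    """Hypothetical stand-in for a real CharSetProber subclass."""
    def __init__(self, name, confidence, active=True):
        self.charset_name = name
        self.active = active
        self._confidence = confidence

    def get_confidence(self):
        return self._confidence


def best_guess(probers):
    # Keep the active child with the highest confidence, as the group
    # prober does when it has no definitive FOUND_IT answer.
    best_conf, best = 0.0, None
    for prober in probers:
        if not prober or not prober.active:
            continue
        conf = prober.get_confidence()
        if conf > best_conf:
            best_conf, best = conf, prober
    return best, best_conf


winner, conf = best_guess([FakeProber("Big5", 0.62),
                           FakeProber("GB2312", 0.81),
                           FakeProber("EUC-KR", 0.40, active=False)])
print(winner.charset_name, conf)  # GB2312 0.81
```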
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/charsetprober.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/charsetprober.py
deleted file mode 100644
index eac4e59..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/charsetprober.py
+++ /dev/null
@@ -1,145 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 2001
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#   Shy Shalom - original C code
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-import logging
-import re
-
-from .enums import ProbingState
-
-
-class CharSetProber(object):
-
-    SHORTCUT_THRESHOLD = 0.95
-
-    def __init__(self, lang_filter=None):
-        self._state = None
-        self.lang_filter = lang_filter
-        self.logger = logging.getLogger(__name__)
-
-    def reset(self):
-        self._state = ProbingState.DETECTING
-
-    @property
-    def charset_name(self):
-        return None
-
-    def feed(self, buf):
-        pass
-
-    @property
-    def state(self):
-        return self._state
-
-    def get_confidence(self):
-        return 0.0
-
-    @staticmethod
-    def filter_high_byte_only(buf):
-        buf = re.sub(b'([\x00-\x7F])+', b' ', buf)
-        return buf
-
-    @staticmethod
-    def filter_international_words(buf):
-        """
-        We define three types of bytes:
-        alphabet: English letters [a-zA-Z]
-        international: international characters [\x80-\xFF]
-        marker: everything else [^a-zA-Z\x80-\xFF]
-
-        The input buffer can be thought to contain a series of words delimited
-        by markers. This function works to filter all words that contain at
-        least one international character. All contiguous sequences of markers
-        are replaced by a single space ascii character.
-
-        This filter applies to all scripts which do not use English characters.
-        """
-        filtered = bytearray()
-
-        # This regex matches only words that contain at least one
-        # international character. The word may include one marker character
-        # at the end.
-        words = re.findall(b'[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?',
-                           buf)
-
-        for word in words:
-            filtered.extend(word[:-1])
-
-            # If the last character in the word is a marker, replace it with a
-            # space as markers shouldn't affect our analysis (they are used
-            # similarly across all languages and may thus have similar
-            # frequencies).
-            last_char = word[-1:]
-            if not last_char.isalpha() and last_char < b'\x80':
-                last_char = b' '
-            filtered.extend(last_char)
-
-        return filtered
-
-    @staticmethod
-    def filter_with_english_letters(buf):
-        """
-        Returns a copy of ``buf`` that retains only the sequences of English
-        alphabet and high byte characters that are not between <> characters.
-        Also retains English alphabet and high byte characters immediately
-        before occurrences of >.
-
-        This filter can be applied to all scripts which contain both English
-        characters and extended ASCII characters, but is currently only used by
-        ``Latin1Prober``.
-        """
-        filtered = bytearray()
-        in_tag = False
-        prev = 0
-
-        for curr in range(len(buf)):
-            # Slice here to get bytes instead of an int with Python 3
-            buf_char = buf[curr:curr + 1]
-            # Check if we're coming out of or entering an HTML tag
-            if buf_char == b'>':
-                in_tag = False
-            elif buf_char == b'<':
-                in_tag = True
-
-            # If current character is not extended-ASCII and not alphabetic...
-            if buf_char < b'\x80' and not buf_char.isalpha():
-                # ...and we're not in a tag
-                if curr > prev and not in_tag:
-                    # Keep everything after last non-extended-ASCII,
-                    # non-alphabetic character
-                    filtered.extend(buf[prev:curr])
-                    # Output a space to delimit stretch we kept
-                    filtered.extend(b' ')
-                prev = curr + 1
-
-        # If we're not in a tag...
-        if not in_tag:
-            # Keep everything after last non-extended-ASCII, non-alphabetic
-            # character
-            filtered.extend(buf[prev:])
-
-        return filtered
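Both static filters can be exercised directly, since they take and return plain byte buffers. A short demo, assuming a standalone chardet release matching this vendored copy (3.x):

```python
from chardet.charsetprober import CharSetProber

buf = "abc déjà <b>vu</b>, ok".encode("latin-1")

# Runs of pure-ASCII bytes collapse to a single space each.
print(CharSetProber.filter_high_byte_only(buf))

# Only "words" containing at least one byte >= 0x80 survive; a trailing
# ASCII marker byte is replaced by a space.
print(CharSetProber.filter_international_words(buf))
```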
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/cli/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/cli/__init__.py
deleted file mode 100644
index 8b13789..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/cli/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/cli/chardetect.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/cli/chardetect.py
deleted file mode 100644
index c61136b..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/cli/chardetect.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python
-"""
-Script which takes one or more file paths and reports on their detected
-encodings
-
-Example::
-
-    % chardetect somefile someotherfile
-    somefile: windows-1252 with confidence 0.5
-    someotherfile: ascii with confidence 1.0
-
-If no paths are provided, it takes its input from stdin.
-
-"""
-
-from __future__ import absolute_import, print_function, unicode_literals
-
-import argparse
-import sys
-
-from pip._vendor.chardet import __version__
-from pip._vendor.chardet.compat import PY2
-from pip._vendor.chardet.universaldetector import UniversalDetector
-
-
-def description_of(lines, name='stdin'):
-    """
-    Return a string describing the probable encoding of a file or
-    list of strings.
-
-    :param lines: The lines to get the encoding of.
-    :type lines: Iterable of bytes
-    :param name: Name of file or collection of lines
-    :type name: str
-    """
-    u = UniversalDetector()
-    for line in lines:
-        line = bytearray(line)
-        u.feed(line)
-        # shortcut out of the loop to save reading further - particularly useful if we read a BOM.
-        if u.done:
-            break
-    u.close()
-    result = u.result
-    if PY2:
-        name = name.decode(sys.getfilesystemencoding(), 'ignore')
-    if result['encoding']:
-        return '{0}: {1} with confidence {2}'.format(name, result['encoding'],
-                                                     result['confidence'])
-    else:
-        return '{0}: no result'.format(name)
-
-
-def main(argv=None):
-    """
-    Handles command line arguments and gets things started.
-
-    :param argv: List of arguments, as if specified on the command-line.
-                 If None, ``sys.argv[1:]`` is used instead.
-    :type argv: list of str
-    """
-    # Get command line arguments
-    parser = argparse.ArgumentParser(
-        description="Takes one or more file paths and reports their detected \
-                     encodings")
-    parser.add_argument('input',
-                        help='File whose encoding we would like to determine. \
-                              (default: stdin)',
-                        type=argparse.FileType('rb'), nargs='*',
-                        default=[sys.stdin if PY2 else sys.stdin.buffer])
-    parser.add_argument('--version', action='version',
-                        version='%(prog)s {0}'.format(__version__))
-    args = parser.parse_args(argv)
-
-    for f in args.input:
-        if f.isatty():
-            print("You are running chardetect interactively. Press " +
-                  "CTRL-D twice at the start of a blank line to signal the " +
-                  "end of your input. If you want help, run chardetect " +
-                  "--help\n", file=sys.stderr)
-        print(description_of(f, f.name))
-
-
-if __name__ == '__main__':
-    main()
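The script above is a thin wrapper over `UniversalDetector`; roughly the same thing can be done programmatically. A sketch assuming the standalone `chardet` package; the file path is hypothetical:

```python
from chardet.universaldetector import UniversalDetector

detector = UniversalDetector()
with open("somefile", "rb") as handle:
    for line in handle:
        detector.feed(line)
        if detector.done:          # e.g. a BOM settled it early
            break
detector.close()
print(detector.result)  # {'encoding': ..., 'confidence': ..., 'language': ...}
```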
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/codingstatemachine.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/codingstatemachine.py
deleted file mode 100644
index 68fba44..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/codingstatemachine.py
+++ /dev/null
@@ -1,88 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-import logging
-
-from .enums import MachineState
-
-
-class CodingStateMachine(object):
-    """
-    A state machine to verify a byte sequence for a particular encoding. For
-    each byte the detector receives, it will feed that byte to every active
-    state machine available, one byte at a time. The state machine changes its
-    state based on its previous state and the byte it receives. There are 3
-    states in a state machine that are of interest to an auto-detector:
-
-    START state: This is the state to start with, or the state reached after
-                 a legal byte sequence (i.e. a valid code point) for a
-                 character has been identified.
-
-    ME state:  This indicates that the state machine identified a byte sequence
-               that is specific to the charset it is designed for and that
-               there is no other possible encoding which can contain this byte
-               sequence. This will lead to an immediate positive answer for
-               the detector.
-
-    ERROR state: This indicates the state machine identified an illegal byte
-                 sequence for that encoding. This will lead to an immediate
-                 negative answer for this encoding. Detector will exclude this
-                 encoding from consideration from here on.
-    """
-    def __init__(self, sm):
-        self._model = sm
-        self._curr_byte_pos = 0
-        self._curr_char_len = 0
-        self._curr_state = None
-        self.logger = logging.getLogger(__name__)
-        self.reset()
-
-    def reset(self):
-        self._curr_state = MachineState.START
-
-    def next_state(self, c):
-        # for each byte we get its class
-        # if it is first byte, we also get byte length
-        byte_class = self._model['class_table'][c]
-        if self._curr_state == MachineState.START:
-            self._curr_byte_pos = 0
-            self._curr_char_len = self._model['char_len_table'][byte_class]
-        # from byte's class and state_table, we get its next state
-        curr_state = (self._curr_state * self._model['class_factor']
-                      + byte_class)
-        self._curr_state = self._model['state_table'][curr_state]
-        self._curr_byte_pos += 1
-        return self._curr_state
-
-    def get_current_charlen(self):
-        return self._curr_char_len
-
-    def get_coding_state_machine(self):
-        return self._model['name']
-
-    @property
-    def language(self):
-        return self._model['language']
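The core of `next_state()` is the lookup `state_table[curr_state * class_factor + byte_class]`, i.e. a flattened [state][byte class] matrix. A self-contained toy model, not taken from chardet, that walks that indexing:

```python
# Toy two-class model (completely made up) showing how next_state()
# indexes the flat state table: row = current state, column = byte class.
START, ERROR, ITS_ME = 0, 1, 2        # mirrors MachineState in enums.py

CLASS_FACTOR = 2                      # number of byte classes
STATE_TABLE = (
    START,  ITS_ME,                   # transitions out of START
    ERROR,  ERROR,                    # transitions out of ERROR
)

state = START
for byte_class in (0, 0, 1):
    state = STATE_TABLE[state * CLASS_FACTOR + byte_class]
    if state in (ITS_ME, ERROR):      # the real prober stops here
        break
print(state)  # 2 == ITS_ME
```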
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/compat.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/compat.py
deleted file mode 100644
index ddd7468..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/compat.py
+++ /dev/null
@@ -1,34 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# Contributor(s):
-#   Dan Blanchard
-#   Ian Cordasco
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-import sys
-
-
-if sys.version_info < (3, 0):
-    PY2 = True
-    PY3 = False
-    base_str = (str, unicode)
-    text_type = unicode
-else:
-    PY2 = False
-    PY3 = True
-    base_str = (bytes, str)
-    text_type = str
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/cp949prober.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/cp949prober.py
deleted file mode 100644
index efd793a..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/cp949prober.py
+++ /dev/null
@@ -1,49 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .chardistribution import EUCKRDistributionAnalysis
-from .codingstatemachine import CodingStateMachine
-from .mbcharsetprober import MultiByteCharSetProber
-from .mbcssm import CP949_SM_MODEL
-
-
-class CP949Prober(MultiByteCharSetProber):
-    def __init__(self):
-        super(CP949Prober, self).__init__()
-        self.coding_sm = CodingStateMachine(CP949_SM_MODEL)
-        # NOTE: CP949 is a superset of EUC-KR, so the distribution should not
-        #       be different.
-        self.distribution_analyzer = EUCKRDistributionAnalysis()
-        self.reset()
-
-    @property
-    def charset_name(self):
-        return "CP949"
-
-    @property
-    def language(self):
-        return "Korean"
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/enums.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/enums.py
deleted file mode 100644
index 0451207..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/enums.py
+++ /dev/null
@@ -1,76 +0,0 @@
-"""
-All of the Enums that are used throughout the chardet package.
-
-:author: Dan Blanchard (dan.blanchard@gmail.com)
-"""
-
-
-class InputState(object):
-    """
-    This enum represents the different states a universal detector can be in.
-    """
-    PURE_ASCII = 0
-    ESC_ASCII = 1
-    HIGH_BYTE = 2
-
-
-class LanguageFilter(object):
-    """
-    This enum represents the different language filters we can apply to a
-    ``UniversalDetector``.
-    """
-    CHINESE_SIMPLIFIED = 0x01
-    CHINESE_TRADITIONAL = 0x02
-    JAPANESE = 0x04
-    KOREAN = 0x08
-    NON_CJK = 0x10
-    ALL = 0x1F
-    CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL
-    CJK = CHINESE | JAPANESE | KOREAN
-
-
-class ProbingState(object):
-    """
-    This enum represents the different states a prober can be in.
-    """
-    DETECTING = 0
-    FOUND_IT = 1
-    NOT_ME = 2
-
-
-class MachineState(object):
-    """
-    This enum represents the different states a state machine can be in.
-    """
-    START = 0
-    ERROR = 1
-    ITS_ME = 2
-
-
-class SequenceLikelihood(object):
-    """
-    This enum represents the likelihood of a character following the previous one.
-    """
-    NEGATIVE = 0
-    UNLIKELY = 1
-    LIKELY = 2
-    POSITIVE = 3
-
-    @classmethod
-    def get_num_categories(cls):
-        """:returns: The number of likelihood categories in the enum."""
-        return 4
-
-
-class CharacterCategory(object):
-    """
-    This enum represents the different categories language models for
-    ``SingleByteCharsetProber`` put characters into.
-
-    Anything less than CONTROL is considered a letter.
-    """
-    UNDEFINED = 255
-    LINE_BREAK = 254
-    SYMBOL = 253
-    DIGIT = 252
-    CONTROL = 251
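`LanguageFilter` values are bit flags, so filters compose with bitwise OR and are tested with bitwise AND. A small self-contained sketch mirroring the constants above:

```python
# Same values as the deleted LanguageFilter enum, defined locally so the
# sketch runs without chardet installed.
CHINESE_SIMPLIFIED = 0x01
CHINESE_TRADITIONAL = 0x02
JAPANESE = 0x04
KOREAN = 0x08
NON_CJK = 0x10

CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL   # 0x03
CJK = CHINESE | JAPANESE | KOREAN                    # 0x0F

lang_filter = CJK
if lang_filter & JAPANESE:
    print("Japanese probers stay active")
if not lang_filter & NON_CJK:
    print("non-CJK single-byte probers are skipped")
```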
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/escprober.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/escprober.py
deleted file mode 100644
index c70493f..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/escprober.py
+++ /dev/null
@@ -1,101 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .charsetprober import CharSetProber
-from .codingstatemachine import CodingStateMachine
-from .enums import LanguageFilter, ProbingState, MachineState
-from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL,
-                    ISO2022KR_SM_MODEL)
-
-
-class EscCharSetProber(CharSetProber):
-    """
-    This CharSetProber uses a "code scheme" approach for detecting encodings,
-    whereby easily recognizable escape or shift sequences are relied on to
-    identify these encodings.
-    """
-
-    def __init__(self, lang_filter=None):
-        super(EscCharSetProber, self).__init__(lang_filter=lang_filter)
-        self.coding_sm = []
-        if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED:
-            self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL))
-            self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL))
-        if self.lang_filter & LanguageFilter.JAPANESE:
-            self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL))
-        if self.lang_filter & LanguageFilter.KOREAN:
-            self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL))
-        self.active_sm_count = None
-        self._detected_charset = None
-        self._detected_language = None
-        self._state = None
-        self.reset()
-
-    def reset(self):
-        super(EscCharSetProber, self).reset()
-        for coding_sm in self.coding_sm:
-            if not coding_sm:
-                continue
-            coding_sm.active = True
-            coding_sm.reset()
-        self.active_sm_count = len(self.coding_sm)
-        self._detected_charset = None
-        self._detected_language = None
-
-    @property
-    def charset_name(self):
-        return self._detected_charset
-
-    @property
-    def language(self):
-        return self._detected_language
-
-    def get_confidence(self):
-        if self._detected_charset:
-            return 0.99
-        else:
-            return 0.00
-
-    def feed(self, byte_str):
-        for c in byte_str:
-            for coding_sm in self.coding_sm:
-                if not coding_sm or not coding_sm.active:
-                    continue
-                coding_state = coding_sm.next_state(c)
-                if coding_state == MachineState.ERROR:
-                    coding_sm.active = False
-                    self.active_sm_count -= 1
-                    if self.active_sm_count <= 0:
-                        self._state = ProbingState.NOT_ME
-                        return self.state
-                elif coding_state == MachineState.ITS_ME:
-                    self._state = ProbingState.FOUND_IT
-                    self._detected_charset = coding_sm.get_coding_state_machine()
-                    self._detected_language = coding_sm.language
-                    return self.state
-
-        return self.state
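The code-scheme approach is easiest to see with an encoding that announces itself via escape sequences, such as ISO-2022-JP. A hedged example using the standalone `chardet` package; the exact confidence reported may vary:

```python
import chardet

# ISO-2022-JP text starts with the ESC $ B shift sequence that the
# ISO2022JP state machine keys on.
sample = "こんにちは、世界。これはテストです。".encode("iso-2022-jp")
print(sample[:3])              # b'\x1b$B'
print(chardet.detect(sample))  # typically ISO-2022-JP with high confidence
```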
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/escsm.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/escsm.py
deleted file mode 100644
index 0069523..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/escsm.py
+++ /dev/null
@@ -1,246 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .enums import MachineState
-
-HZ_CLS = (
-1,0,0,0,0,0,0,0,  # 00 - 07
-0,0,0,0,0,0,0,0,  # 08 - 0f
-0,0,0,0,0,0,0,0,  # 10 - 17
-0,0,0,1,0,0,0,0,  # 18 - 1f
-0,0,0,0,0,0,0,0,  # 20 - 27
-0,0,0,0,0,0,0,0,  # 28 - 2f
-0,0,0,0,0,0,0,0,  # 30 - 37
-0,0,0,0,0,0,0,0,  # 38 - 3f
-0,0,0,0,0,0,0,0,  # 40 - 47
-0,0,0,0,0,0,0,0,  # 48 - 4f
-0,0,0,0,0,0,0,0,  # 50 - 57
-0,0,0,0,0,0,0,0,  # 58 - 5f
-0,0,0,0,0,0,0,0,  # 60 - 67
-0,0,0,0,0,0,0,0,  # 68 - 6f
-0,0,0,0,0,0,0,0,  # 70 - 77
-0,0,0,4,0,5,2,0,  # 78 - 7f
-1,1,1,1,1,1,1,1,  # 80 - 87
-1,1,1,1,1,1,1,1,  # 88 - 8f
-1,1,1,1,1,1,1,1,  # 90 - 97
-1,1,1,1,1,1,1,1,  # 98 - 9f
-1,1,1,1,1,1,1,1,  # a0 - a7
-1,1,1,1,1,1,1,1,  # a8 - af
-1,1,1,1,1,1,1,1,  # b0 - b7
-1,1,1,1,1,1,1,1,  # b8 - bf
-1,1,1,1,1,1,1,1,  # c0 - c7
-1,1,1,1,1,1,1,1,  # c8 - cf
-1,1,1,1,1,1,1,1,  # d0 - d7
-1,1,1,1,1,1,1,1,  # d8 - df
-1,1,1,1,1,1,1,1,  # e0 - e7
-1,1,1,1,1,1,1,1,  # e8 - ef
-1,1,1,1,1,1,1,1,  # f0 - f7
-1,1,1,1,1,1,1,1,  # f8 - ff
-)
-
-HZ_ST = (
-MachineState.START,MachineState.ERROR,     3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07
-MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f
-MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,     4,MachineState.ERROR,# 10-17
-     5,MachineState.ERROR,     6,MachineState.ERROR,     5,     5,     4,MachineState.ERROR,# 18-1f
-     4,MachineState.ERROR,     4,     4,     4,MachineState.ERROR,     4,MachineState.ERROR,# 20-27
-     4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f
-)
-
-HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0)
-
-HZ_SM_MODEL = {'class_table': HZ_CLS,
-               'class_factor': 6,
-               'state_table': HZ_ST,
-               'char_len_table': HZ_CHAR_LEN_TABLE,
-               'name': "HZ-GB-2312",
-               'language': 'Chinese'}
-
-ISO2022CN_CLS = (
-2,0,0,0,0,0,0,0,  # 00 - 07
-0,0,0,0,0,0,0,0,  # 08 - 0f
-0,0,0,0,0,0,0,0,  # 10 - 17
-0,0,0,1,0,0,0,0,  # 18 - 1f
-0,0,0,0,0,0,0,0,  # 20 - 27
-0,3,0,0,0,0,0,0,  # 28 - 2f
-0,0,0,0,0,0,0,0,  # 30 - 37
-0,0,0,0,0,0,0,0,  # 38 - 3f
-0,0,0,4,0,0,0,0,  # 40 - 47
-0,0,0,0,0,0,0,0,  # 48 - 4f
-0,0,0,0,0,0,0,0,  # 50 - 57
-0,0,0,0,0,0,0,0,  # 58 - 5f
-0,0,0,0,0,0,0,0,  # 60 - 67
-0,0,0,0,0,0,0,0,  # 68 - 6f
-0,0,0,0,0,0,0,0,  # 70 - 77
-0,0,0,0,0,0,0,0,  # 78 - 7f
-2,2,2,2,2,2,2,2,  # 80 - 87
-2,2,2,2,2,2,2,2,  # 88 - 8f
-2,2,2,2,2,2,2,2,  # 90 - 97
-2,2,2,2,2,2,2,2,  # 98 - 9f
-2,2,2,2,2,2,2,2,  # a0 - a7
-2,2,2,2,2,2,2,2,  # a8 - af
-2,2,2,2,2,2,2,2,  # b0 - b7
-2,2,2,2,2,2,2,2,  # b8 - bf
-2,2,2,2,2,2,2,2,  # c0 - c7
-2,2,2,2,2,2,2,2,  # c8 - cf
-2,2,2,2,2,2,2,2,  # d0 - d7
-2,2,2,2,2,2,2,2,  # d8 - df
-2,2,2,2,2,2,2,2,  # e0 - e7
-2,2,2,2,2,2,2,2,  # e8 - ef
-2,2,2,2,2,2,2,2,  # f0 - f7
-2,2,2,2,2,2,2,2,  # f8 - ff
-)
-
-ISO2022CN_ST = (
-MachineState.START,     3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07
-MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f
-MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17
-MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     4,MachineState.ERROR,# 18-1f
-MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27
-     5,     6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f
-MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37
-MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f
-)
-
-ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0)
-
-ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS,
-                      'class_factor': 9,
-                      'state_table': ISO2022CN_ST,
-                      'char_len_table': ISO2022CN_CHAR_LEN_TABLE,
-                      'name': "ISO-2022-CN",
-                      'language': 'Chinese'}
-
-ISO2022JP_CLS = (
-2,0,0,0,0,0,0,0,  # 00 - 07
-0,0,0,0,0,0,2,2,  # 08 - 0f
-0,0,0,0,0,0,0,0,  # 10 - 17
-0,0,0,1,0,0,0,0,  # 18 - 1f
-0,0,0,0,7,0,0,0,  # 20 - 27
-3,0,0,0,0,0,0,0,  # 28 - 2f
-0,0,0,0,0,0,0,0,  # 30 - 37
-0,0,0,0,0,0,0,0,  # 38 - 3f
-6,0,4,0,8,0,0,0,  # 40 - 47
-0,9,5,0,0,0,0,0,  # 48 - 4f
-0,0,0,0,0,0,0,0,  # 50 - 57
-0,0,0,0,0,0,0,0,  # 58 - 5f
-0,0,0,0,0,0,0,0,  # 60 - 67
-0,0,0,0,0,0,0,0,  # 68 - 6f
-0,0,0,0,0,0,0,0,  # 70 - 77
-0,0,0,0,0,0,0,0,  # 78 - 7f
-2,2,2,2,2,2,2,2,  # 80 - 87
-2,2,2,2,2,2,2,2,  # 88 - 8f
-2,2,2,2,2,2,2,2,  # 90 - 97
-2,2,2,2,2,2,2,2,  # 98 - 9f
-2,2,2,2,2,2,2,2,  # a0 - a7
-2,2,2,2,2,2,2,2,  # a8 - af
-2,2,2,2,2,2,2,2,  # b0 - b7
-2,2,2,2,2,2,2,2,  # b8 - bf
-2,2,2,2,2,2,2,2,  # c0 - c7
-2,2,2,2,2,2,2,2,  # c8 - cf
-2,2,2,2,2,2,2,2,  # d0 - d7
-2,2,2,2,2,2,2,2,  # d8 - df
-2,2,2,2,2,2,2,2,  # e0 - e7
-2,2,2,2,2,2,2,2,  # e8 - ef
-2,2,2,2,2,2,2,2,  # f0 - f7
-2,2,2,2,2,2,2,2,  # f8 - ff
-)
-
-ISO2022JP_ST = (
-MachineState.START,     3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07
-MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f
-MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17
-MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f
-MachineState.ERROR,     5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     4,MachineState.ERROR,MachineState.ERROR,# 20-27
-MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f
-MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37
-MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f
-MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47
-)
-
-ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
-
-ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS,
-                      'class_factor': 10,
-                      'state_table': ISO2022JP_ST,
-                      'char_len_table': ISO2022JP_CHAR_LEN_TABLE,
-                      'name': "ISO-2022-JP",
-                      'language': 'Japanese'}
-
-ISO2022KR_CLS = (
-2,0,0,0,0,0,0,0,  # 00 - 07
-0,0,0,0,0,0,0,0,  # 08 - 0f
-0,0,0,0,0,0,0,0,  # 10 - 17
-0,0,0,1,0,0,0,0,  # 18 - 1f
-0,0,0,0,3,0,0,0,  # 20 - 27
-0,4,0,0,0,0,0,0,  # 28 - 2f
-0,0,0,0,0,0,0,0,  # 30 - 37
-0,0,0,0,0,0,0,0,  # 38 - 3f
-0,0,0,5,0,0,0,0,  # 40 - 47
-0,0,0,0,0,0,0,0,  # 48 - 4f
-0,0,0,0,0,0,0,0,  # 50 - 57
-0,0,0,0,0,0,0,0,  # 58 - 5f
-0,0,0,0,0,0,0,0,  # 60 - 67
-0,0,0,0,0,0,0,0,  # 68 - 6f
-0,0,0,0,0,0,0,0,  # 70 - 77
-0,0,0,0,0,0,0,0,  # 78 - 7f
-2,2,2,2,2,2,2,2,  # 80 - 87
-2,2,2,2,2,2,2,2,  # 88 - 8f
-2,2,2,2,2,2,2,2,  # 90 - 97
-2,2,2,2,2,2,2,2,  # 98 - 9f
-2,2,2,2,2,2,2,2,  # a0 - a7
-2,2,2,2,2,2,2,2,  # a8 - af
-2,2,2,2,2,2,2,2,  # b0 - b7
-2,2,2,2,2,2,2,2,  # b8 - bf
-2,2,2,2,2,2,2,2,  # c0 - c7
-2,2,2,2,2,2,2,2,  # c8 - cf
-2,2,2,2,2,2,2,2,  # d0 - d7
-2,2,2,2,2,2,2,2,  # d8 - df
-2,2,2,2,2,2,2,2,  # e0 - e7
-2,2,2,2,2,2,2,2,  # e8 - ef
-2,2,2,2,2,2,2,2,  # f0 - f7
-2,2,2,2,2,2,2,2,  # f8 - ff
-)
-
-ISO2022KR_ST = (
-MachineState.START,     3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07
-MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f
-MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     4,MachineState.ERROR,MachineState.ERROR,# 10-17
-MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f
-MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27
-)
-
-ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0)
-
-ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS,
-                      'class_factor': 6,
-                      'state_table': ISO2022KR_ST,
-                      'char_len_table': ISO2022KR_CHAR_LEN_TABLE,
-                      'name': "ISO-2022-KR",
-                      'language': 'Korean'}
-
-
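Each `*_SM_MODEL` is plain data: the state table is a flattened matrix with `class_factor` columns per state. A quick peek, assuming a standalone chardet release with the same module layout as this vendored copy:

```python
from chardet.escsm import HZ_SM_MODEL

model = HZ_SM_MODEL
states = len(model['state_table']) // model['class_factor']
print(model['name'], model['language'])                            # HZ-GB-2312 Chinese
print('byte classes:', model['class_factor'], 'states:', states)   # 6 and 8
```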
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/eucjpprober.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/eucjpprober.py
deleted file mode 100644
index 20ce8f7..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/eucjpprober.py
+++ /dev/null
@@ -1,92 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .enums import ProbingState, MachineState
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
-from .chardistribution import EUCJPDistributionAnalysis
-from .jpcntx import EUCJPContextAnalysis
-from .mbcssm import EUCJP_SM_MODEL
-
-
-class EUCJPProber(MultiByteCharSetProber):
-    def __init__(self):
-        super(EUCJPProber, self).__init__()
-        self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL)
-        self.distribution_analyzer = EUCJPDistributionAnalysis()
-        self.context_analyzer = EUCJPContextAnalysis()
-        self.reset()
-
-    def reset(self):
-        super(EUCJPProber, self).reset()
-        self.context_analyzer.reset()
-
-    @property
-    def charset_name(self):
-        return "EUC-JP"
-
-    @property
-    def language(self):
-        return "Japanese"
-
-    def feed(self, byte_str):
-        for i in range(len(byte_str)):
-            # PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte
-            coding_state = self.coding_sm.next_state(byte_str[i])
-            if coding_state == MachineState.ERROR:
-                self.logger.debug('%s %s prober hit error at byte %s',
-                                  self.charset_name, self.language, i)
-                self._state = ProbingState.NOT_ME
-                break
-            elif coding_state == MachineState.ITS_ME:
-                self._state = ProbingState.FOUND_IT
-                break
-            elif coding_state == MachineState.START:
-                char_len = self.coding_sm.get_current_charlen()
-                if i == 0:
-                    self._last_char[1] = byte_str[0]
-                    self.context_analyzer.feed(self._last_char, char_len)
-                    self.distribution_analyzer.feed(self._last_char, char_len)
-                else:
-                    self.context_analyzer.feed(byte_str[i - 1:i + 1],
-                                                char_len)
-                    self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
-                                                     char_len)
-
-        self._last_char[0] = byte_str[-1]
-
-        if self.state == ProbingState.DETECTING:
-            if (self.context_analyzer.got_enough_data() and
-               (self.get_confidence() > self.SHORTCUT_THRESHOLD)):
-                self._state = ProbingState.FOUND_IT
-
-        return self.state
-
-    def get_confidence(self):
-        context_conf = self.context_analyzer.get_confidence()
-        distrib_conf = self.distribution_analyzer.get_confidence()
-        return max(context_conf, distrib_conf)
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/euckrfreq.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/euckrfreq.py
deleted file mode 100644
index b68078c..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/euckrfreq.py
+++ /dev/null
@@ -1,195 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# Sampling from about 20M of text material, including literature and computer technology
-
-# 128  --> 0.79
-# 256  --> 0.92
-# 512  --> 0.986
-# 1024 --> 0.99944
-# 2048 --> 0.99999
-#
-# Ideal Distribution Ratio = 0.98653 / (1-0.98653) = 73.24
-# Random Distribution Ratio = 512 / (2350-512) = 0.279.
-#
-# Typical Distribution Ratio
-
-EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0
-
-EUCKR_TABLE_SIZE = 2352
-
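The ratio arithmetic in the comment block above can be reproduced directly; the figures come from the comment itself, not from running chardet:

```python
# With the 512 most frequent characters covering ~98.653% of the sampled
# text, the ideal ratio is ~73.24, while a uniformly random distribution
# over the 2350 most common characters would give only ~0.279.
coverage = 0.98653
ideal_ratio = coverage / (1 - coverage)
random_ratio = 512 / (2350 - 512)
print(round(ideal_ratio, 2), round(random_ratio, 3))  # 73.24 0.279
```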
-# Char to FreqOrder table
-EUCKR_CHAR_TO_FREQ_ORDER = (
-  13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722,  87,
-1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,
-1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488,  20,1733,1269,1734,
- 945,1400,1735,  47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,
- 116, 987, 813,1401, 683,  75,1204, 145,1740,1741,1742,1743,  16, 847, 667, 622,
- 708,1744,1745,1746, 966, 787, 304, 129,1747,  60, 820, 123, 676,1748,1749,1750,
-1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,
- 344,1763,1764,1765,1766,  89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,
- 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,
-1780, 337, 751,1058,  28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782,  19,
-1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,
-1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,
-1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,
-1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,
- 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,
-1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,
-1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,
-1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,
-1412,1837,1838,  39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,
- 544,1023,1081, 869,  91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,
-1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,
- 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,
- 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,
-1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871,
- 282,  96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420,
-1421, 268,1877,1422,1878,1879,1880, 308,1881,   2, 537,1882,1883,1215,1884,1885,
- 127, 791,1886,1273,1423,1887,  34, 336, 404, 643,1888, 571, 654, 894, 840,1889,
-   0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893,
-1894,1123,  48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317,
-1899, 694,1900, 909, 734,1424, 572, 866,1425, 691,  85, 524,1010, 543, 394, 841,
-1901,1902,1903,1026,1904,1905,1906,1907,1908,1909,  30, 451, 651, 988, 310,1910,
-1911,1426, 810,1216,  93,1912,1913,1277,1217,1914, 858, 759,  45,  58, 181, 610,
- 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375,
-1919, 359,1920, 687,1921, 822,1922, 293,1923,1924,  40, 662, 118, 692,  29, 939,
- 887, 640, 482, 174,1925,  69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870,
- 217, 854,1163, 823,1927,1928,1929,1930, 834,1931,  78,1932, 859,1933,1063,1934,
-1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888,
-1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950,
-1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065,
-1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002,
-1283,1222,1960,1961,1962,1963,  36, 383, 228, 753, 247, 454,1964, 876, 678,1965,
-1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467,
-  50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285,
- 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971,   7,
- 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979,
-1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985,
- 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994,
-1995, 560, 223,1287,  98,   8, 189, 650, 978,1288,1996,1437,1997,  17, 345, 250,
- 423, 277, 234, 512, 226,  97, 289,  42, 167,1998, 201,1999,2000, 843, 836, 824,
- 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003,
-2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008,  71,1440, 745,
- 619, 688,2009, 829,2010,2011, 147,2012,  33, 948,2013,2014,  74, 224,2015,  61,
- 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023,
-2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591,  52, 724, 246,2031,2032,
-2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912,
-2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224,
- 719,1170, 959, 440, 437, 534,  84, 388, 480,1131, 159, 220, 198, 679,2044,1012,
- 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050,
-2051,2052,2053,  59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681,
- 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414,
-1444,2064,2065,  41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068,
-2069,1292,2070,2071,1445,2072,1446,2073,2074,  55, 588,  66,1447, 271,1092,2075,
-1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850,
-2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606,
-2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449,
-1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452,
- 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112,
-2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121,
-2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130,
-  22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174,  73,1096, 231, 274,
- 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139,
-2141,2142,2143,2144,  11, 374, 844,2145, 154,1232,  46,1461,2146, 838, 830, 721,
-1233, 106,2147,  90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298,
-2150,1462, 761, 565,2151, 686,2152, 649,2153,  72, 173,2154, 460, 415,2155,1463,
-2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747,
-2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177,  23, 530, 285,
-2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187,
-2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193,  10,
-2194, 613, 424,2195, 979, 108, 449, 589,  27, 172,  81,1031,  80, 774, 281, 350,
-1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201,
-2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972,
-2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219,
-2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233,
-2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242,
-2243, 521, 486, 548,2244,2245,2246,1473,1300,  53, 549, 137, 875,  76, 158,2247,
-1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178,
-1475,2249,  82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255,
-2256,  18, 450, 206,2257, 290, 292,1142,2258, 511, 162,  99, 346, 164, 735,2259,
-1476,1477,   4, 554, 343, 798,1099,2260,1100,2261,  43, 171,1303, 139, 215,2262,
-2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702,
-1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272,  67,2273,
- 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541,
-2282,2283,2284,2285,2286,  70, 852,1071,2287,2288,2289,2290,  21,  56, 509, 117,
- 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187,
-2294,1046,1479,2295, 340,2296,  63,1047, 230,2297,2298,1305, 763,1306, 101, 800,
- 808, 494,2299,2300,2301, 903,2302,  37,1072,  14,   5,2303,  79, 675,2304, 312,
-2305,2306,2307,2308,2309,1480,   6,1307,2310,2311,2312,   1, 470,  35,  24, 229,
-2313, 695, 210,  86, 778,  15, 784, 592, 779,  32,  77, 855, 964,2314, 259,2315,
- 501, 380,2316,2317,  83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484,
-2320,2321,2322,2323,2324,2325,1485,2326,2327, 128,  57,  68, 261,1048, 211, 170,
-1240,  31,2328,  51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335,
- 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601,
-1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395,
-2351,1490,1491,  62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354,
-1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476,
-2361,2362, 332,  12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035,
- 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498,
-2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310,
-1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389,
-2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504,
-1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505,
-2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145,
-1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624,
- 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700,
-2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221,
-2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377,
- 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448,
- 915, 489,2449,1514,1184,2450,2451, 515,  64, 427, 495,2452, 583,2453, 483, 485,
-1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705,
-1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465,
- 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471,
-2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997,
-2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486,
- 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187,  65,2494,
- 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771,
- 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323,
-2499,2500,  49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491,
-  95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510,
- 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519,
-2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532,
-2533,  25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199,
- 704, 504, 468, 758, 657,1528, 196,  44, 839,1246, 272, 750,2543, 765, 862,2544,
-2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247,
-1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441,
- 249,1075,2556,2557,2558, 466, 743,2559,2560,2561,  92, 514, 426, 420, 526,2562,
-2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362,
-2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583,
-2584,1532,  54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465,
-   3, 458,   9,  38,2588, 107, 110, 890, 209,  26, 737, 498,2589,1534,2590, 431,
- 202,  88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151,
- 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596,
-2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601,  94, 175, 197, 406,
-2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611,
-2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619,
-1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628,
-2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042,
- 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642,  # 512, 256
-)
-
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/euckrprober.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/euckrprober.py
deleted file mode 100644
index 345a060..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/euckrprober.py
+++ /dev/null
@@ -1,47 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
-from .chardistribution import EUCKRDistributionAnalysis
-from .mbcssm import EUCKR_SM_MODEL
-
-
-class EUCKRProber(MultiByteCharSetProber):
-    def __init__(self):
-        super(EUCKRProber, self).__init__()
-        self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL)
-        self.distribution_analyzer = EUCKRDistributionAnalysis()
-        self.reset()
-
-    @property
-    def charset_name(self):
-        return "EUC-KR"
-
-    @property
-    def language(self):
-        return "Korean"
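(Aside, for context on the vendored prober deleted above: EUCKRProber is normally never instantiated directly; it is driven through chardet's public detection API. A minimal sketch follows, assuming the standalone chardet distribution is installed; the sample text and variable names are illustrative only and not taken from this repository.)

    import chardet

    # A short EUC-KR encoded byte string; the literal is only an example.
    sample = "안녕하세요, 세계".encode("euc-kr")

    # chardet.detect() runs its registered probers (EUCKRProber among them)
    # and returns a dict with 'encoding', 'confidence' and 'language'.
    result = chardet.detect(sample)
    print(result["encoding"], result["confidence"])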
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/euctwfreq.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/euctwfreq.py
deleted file mode 100644
index ed7a995..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/euctwfreq.py
+++ /dev/null
@@ -1,387 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# EUCTW frequency table
-# Converted from big5 work
-# by Taiwan's Mandarin Promotion Council
-# 
-
-# 128  --> 0.42261
-# 256  --> 0.57851
-# 512  --> 0.74851
-# 1024 --> 0.89384
-# 2048 --> 0.97583
-#
-# Ideal Distribution Ratio = 0.74851/(1-0.74851) = 2.98
-# Random Distribution Ratio = 512/(5401-512) = 0.105
-#
-# The typical distribution ratio is about 25% of the ideal one, but still much higher than RDR
-
-EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75
-
-# Char to FreqOrder table
-EUCTW_TABLE_SIZE = 5376
-
-EUCTW_CHAR_TO_FREQ_ORDER = (
-   1,1800,1506, 255,1431, 198,   9,  82,   6,7310, 177, 202,3615,1256,2808, 110,  # 2742
-3735,  33,3241, 261,  76,  44,2113,  16,2931,2184,1176, 659,3868,  26,3404,2643,  # 2758
-1198,3869,3313,4060, 410,2211, 302, 590, 361,1963,   8, 204,  58,4296,7311,1931,  # 2774
-  63,7312,7313, 317,1614,  75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809,  # 2790
-3616,   3,  10,3870,1471,  29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315,  # 2806
-4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932,  34,3501,3173,  64, 604,  # 2822
-7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337,  72, 406,7319,  80,  # 2838
- 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449,  69,2969, 591,  # 2854
- 179,2095, 471, 115,2034,1843,  60,  50,2970, 134, 806,1868, 734,2035,3407, 180,  # 2870
- 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359,  # 2886
-2495,  90,2707,1338, 663,  11, 906,1099,2545,  20,2436, 182, 532,1716,7321, 732,  # 2902
-1376,4062,1311,1420,3175,  25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529,  # 2918
-3243, 475,1447,3617,7322, 117,  21, 656, 810,1297,2295,2329,3502,7323, 126,4063,  # 2934
- 706, 456, 150, 613,4299,  71,1118,2036,4064, 145,3069,  85, 835, 486,2114,1246,  # 2950
-1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221,  # 2966
-3503,3110,7325,1955,1153,4065,  83, 296,1199,3070, 192, 624,  93,7326, 822,1897,  # 2982
-2810,3111, 795,2064, 991,1554,1542,1592,  27,  43,2853, 859, 139,1456, 860,4300,  # 2998
- 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618,  # 3014
-3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228,  # 3030
-1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077,  # 3046
-7328,7329,2173,3176,3619,2673, 593, 845,1062,3244,  88,1723,2037,3875,1950, 212,  # 3062
- 266, 152, 149, 468,1898,4066,4302,  77, 187,7330,3018,  37,   5,2972,7331,3876,  # 3078
-7332,7333,  39,2517,4303,2894,3177,2078,  55, 148,  74,4304, 545, 483,1474,1029,  # 3094
-1665, 217,1869,1531,3113,1104,2645,4067,  24, 172,3507, 900,3877,3508,3509,4305,  # 3110
-  32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674,   4,3019,3314,1427,1788,  # 3126
- 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520,  # 3142
-3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439,  38,7339,1063,7340, 794,  # 3158
-3879,1435,2296,  46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804,  35, 707,  # 3174
- 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409,  # 3190
-2128,1363,3623,1423, 697, 100,3071,  48,  70,1231, 495,3114,2193,7345,1294,7346,  # 3206
-2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411,  # 3222
- 314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412,  # 3238
- 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933,  # 3254
-3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895,  # 3270
-1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369,  # 3286
-1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000,  # 3302
-1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381,   7,  # 3318
-2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313,  # 3334
- 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513,  # 3350
-4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647,  # 3366
-1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357,  # 3382
-7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438,  # 3398
-2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978,  # 3414
- 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210,  # 3430
-  98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642,  # 3446
- 523,2776,2777,2648,7364, 141,2231,1333,  68, 176, 441, 876, 907,4077, 603,2592,  # 3462
- 710, 171,3417, 404, 549,  18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320,  # 3478
-7366,2973, 368,7367, 146, 366,  99, 871,3627,1543, 748, 807,1586,1185,  22,2258,  # 3494
- 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702,  # 3510
-1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371,  59,7372,  # 3526
- 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836,  # 3542
- 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629,  # 3558
-7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686,  # 3574
-1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496,  # 3590
- 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560,  # 3606
-3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496,  # 3622
-4081,  57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082,  # 3638
-3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083,  # 3654
- 279,3120,  51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264,  # 3670
- 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411,  # 3686
-1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483,  # 3702
-4084,2468,1436, 953,4085,2054,4331, 671,2395,  79,4086,2441,3252, 608, 567,2680,  # 3718
-3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672,  # 3734
-3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681,  # 3750
-2397,7400,7401,7402,4089,3025,   0,7403,2469, 315, 231,2442, 301,3319,4335,2380,  # 3766
-7404, 233,4090,3631,1818,4336,4337,7405,  96,1776,1315,2082,7406, 257,7407,1809,  # 3782
-3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183,  # 3798
-7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934,  # 3814
-1484,7413,1712, 127,  67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351,  # 3830
-2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545,  # 3846
-1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358,  # 3862
-  78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338,  # 3878
-1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423,  # 3894
-4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859,  # 3910
-3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636,  # 3926
- 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344,  # 3942
- 165, 243,4345,3637,2521, 123, 683,4096, 764,4346,  36,3895,1792, 589,2902, 816,  # 3958
- 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891,  # 3974
-2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662,  # 3990
-7425, 611,1156, 854,2381,1316,2861,   2, 386, 515,2904,7426,7427,3253, 868,2234,  # 4006
-1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431,  # 4022
-2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676,  # 4038
-1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437,  # 4054
-1993,7438,4350,7439,7440,2195,  13,2779,3638,2980,3124,1229,1916,7441,3756,2131,  # 4070
-7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307,  # 4086
-7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519,  # 4102
-7452, 128,2132,  92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980,  # 4118
-3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401,  # 4134
-4353,2248,  94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101,  # 4150
-1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937,  # 4166
-7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466,  # 4182
-2332,2067,  23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526,  # 4198
-7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598,  # 4214
-3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471,  # 4230
-3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863,  41,7473,  # 4246
-7474,4361,7475,1657,2333,  19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323,  # 4262
-2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416,  # 4278
-7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427,  # 4294
- 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110,  # 4310
-4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485,  # 4326
-2683, 733,  40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428,  # 4342
-7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907,  # 4358
-3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901,  # 4374
-2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870,  # 4390
-2752,2986,7490, 435,7491, 343,1108, 596,  17,1751,4365,2235,3430,3643,7492,4366,  # 4406
- 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031,  # 4422
-2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240,  # 4438
-1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521,  # 4454
-1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673,  # 4470
-2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260,  # 4486
-1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619,  # 4502
-7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506,  # 4518
-7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382,  # 4534
-2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324,  # 4550
-4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384,  # 4566
-1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551,  30,2263,4122,  # 4582
-7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192,  # 4598
- 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388,  # 4614
-4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129,  # 4630
- 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523,  # 4646
-2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692,  # 4662
- 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915,  # 4678
-1041,2987, 293,1168,  87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219,  # 4694
-1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825,  # 4710
- 730,1515, 184,2827,  66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975,  # 4726
-3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394,  # 4742
-3918,7535,7536,1186,  15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758,  # 4758
-1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434,  # 4774
-3541,1342,1681,1718, 766,3264, 286,  89,2946,3649,7540,1713,7541,2597,3334,2990,  # 4790
-7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335,  # 4806
-7544,3265, 310, 313,3435,2299, 770,4134,  54,3034, 189,4397,3082,3769,3922,7545,  # 4822
-1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137,  # 4838
-2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471,  # 4854
-1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555,  # 4870
-3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139,  # 4886
-2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729,  # 4902
-3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482,  # 4918
-2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652,  # 4934
-4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867,  # 4950
-4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499,  # 4966
-3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250,  # 4982
-  97,  81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830,  # 4998
-3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188,  # 5014
- 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408,  # 5030
-3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447,  # 5046
-3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527,  # 5062
-3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932,  # 5078
-1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411,  # 5094
-7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270,  # 5110
- 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589,  # 5126
-7590, 587,  14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591,  # 5142
-1702,1226, 102,1547,  62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756,  # 5158
- 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145,  # 5174
-4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598,  86,1494,1730,  # 5190
-3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069,  # 5206
- 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938,  # 5222
-2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625,  # 5238
-2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885,  28,2686,  # 5254
-3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797,  # 5270
-1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958,  # 5286
-4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528,  # 5302
-2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241,  # 5318
-1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169,  # 5334
-1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540,  # 5350
-2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342,  # 5366
-3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425,  # 5382
-1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427,  # 5398
-7617,3446,7618,7619,7620,3277,2689,1433,3278, 131,  95,1504,3946, 723,4159,3141,  # 5414
-1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949,  # 5430
-4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654,  53,7624,2996,7625,  # 5446
-1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202,  # 5462
- 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640,  # 5478
-1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936,  # 5494
-3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955,  # 5510
-3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910,  # 5526
-2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325,  # 5542
-1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024,  # 5558
-4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340,  # 5574
- 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918,  # 5590
-7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439,  # 5606
-2317,3283,7650,7651,4164,7652,4165,  84,4166, 112, 989,7653, 547,1059,3961, 701,  # 5622
-3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494,  # 5638
-4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285,  # 5654
- 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077,  # 5670
-7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443,  # 5686
-7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169,  # 5702
-1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906,  # 5718
-4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968,  # 5734
-3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804,  # 5750
-2690,1516,3559,1121,1082,1329,3284,3970,1449,3794,  65,1128,2835,2913,2759,1590,  # 5766
-3795,7674,7675,  12,2658,  45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676,  # 5782
-3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680,  # 5798
-2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285,  # 5814
-1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687,  # 5830
-4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454,  # 5846
-3670,1858,  91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403,  # 5862
-3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973,  # 5878
-2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454,  # 5894
-4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761,  61,3976,3672,1822,3977,  # 5910
-7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695,  # 5926
-3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945,  # 5942
-2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460,  # 5958
-3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179,  # 5974
-1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706,  # 5990
-2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982,  # 6006
-3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183,  # 6022
-4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043,  56,1396,3090,  # 6038
-2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717,  # 6054
-2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985,  # 6070
-7722,1076,  49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184,  # 6086
-1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472,  # 6102
-2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351,  # 6118
-1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714,  # 6134
-3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404,  # 6150
-4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629,  31,2838,  # 6166
-2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620,  # 6182
-3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738,  # 6198
-3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869,  # 6214
-2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558,  # 6230
-4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107,  # 6246
-2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216,  # 6262
-3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984,  # 6278
-4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705,  # 6294
-7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687,  # 6310
-3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840,  # 6326
- 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521,  # 6342
-1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412,  42,3096, 464,7759,2632,  # 6358
-4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295,  # 6374
-1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765,  # 6390
-4487,7766,3002, 962, 588,3574, 289,3219,2634,1116,  52,7767,3047,1796,7768,7769,  # 6406
-7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572,  # 6422
- 510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776,  # 6438
-7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911,  # 6454
-2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693,  # 6470
-1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672,  # 6486
-1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013,  # 6502
-3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816,  # 6518
- 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010,  # 6534
- 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175,  # 6550
- 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473,  # 6566
-3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298,  # 6582
-2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359,  # 6598
- 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805,  # 6614
-7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807,  # 6630
-1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810,  # 6646
-3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812,  # 6662
-7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814,  # 6678
-1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818,  # 6694
-7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821,  # 6710
-4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877,  # 6726
-1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702,  # 6742
-2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813,  # 6758
-2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503,  # 6774
-4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484,  # 6790
- 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833,  # 6806
- 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457,  # 6822
-3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704,  # 6838
-3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878,  # 6854
-1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508,  # 6870
-2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451,  # 6886
-7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509,  # 6902
-1561,2664,1452,4010,1375,7855,7856,  47,2959, 316,7857,1406,1591,2923,3156,7858,  # 6918
-1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428,  # 6934
-3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800,  # 6950
- 919,2347,2960,2348,1270,4511,4012,  73,7862,7863, 647,7864,3228,2843,2255,1550,  # 6966
-1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347,  # 6982
-4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515,  # 6998
-7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665,  # 7014
-2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518,  # 7030
-3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833,  # 7046
- 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961,  # 7062
-1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508,  # 7078
-2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482,  # 7094
-2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098,  # 7110
-7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483,  # 7126
-7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834,  # 7142
-7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904,  # 7158
-2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724,  # 7174
-2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910,  # 7190
-1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701,  # 7206
-4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062,  # 7222
-3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922,  # 7238
-3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925,  # 7254
-4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248,  # 7270
-4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487,  # 7286
-2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015,  # 7302
-2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935,  # 7318
-7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104,  # 7334
-4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580,  # 7350
-7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380,  # 7366
-2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951,  # 7382
-1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948,  # 7398
-3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488,  # 7414
-4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737,  # 7430
-2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017,  # 7446
- 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047,  # 7462
-2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967,  # 7478
-1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385,  # 7494
-2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975,  # 7510
-2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979,  # 7526
-4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982,  # 7542
-7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306,  # 7558
-1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270,  # 7574
-3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012,  # 7590
-7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236,  # 7606
-1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550,  # 7622
-8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746,  # 7638
-2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066,  # 7654
-8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977,  # 7670
-2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009,  # 7686
-2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013,  # 7702
-8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552,  # 7718
-8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023,  # 7734
-8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143,  # 7750
- 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278,  # 7766
-8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698,  # 7782
-4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706,  # 7798
-3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859,  # 7814
-8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344,  # 7830
-1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894,  # 7846
-8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194,  # 7862
- 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760,  # 7878
-1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210,  # 7894
- 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642,  # 7910
-4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013,  # 7926
-1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889,  # 7942
-4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239,  # 7958
-1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240,  # 7974
- 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083,  # 7990
-3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088,  # 8006
-4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094,  # 8022
-8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101,  # 8038
- 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104,  # 8054
-3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015,  # 8070
- 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941,  # 8086
-2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118,  # 8102
-)
-
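(Aside on the ratio comments near the top of the deleted euctwfreq.py: the "ideal" and "random" distribution ratios quoted there follow directly from the coverage figures. A minimal sketch of that arithmetic in plain Python; the helper and argument names are mine, not part of chardet.)

    def distribution_ratios(coverage_at_512, alphabet_size):
        # Ideal ratio: probability mass of the 512 most frequent characters
        # versus everything else, e.g. 0.74851 / (1 - 0.74851) ≈ 2.98.
        ideal = coverage_at_512 / (1.0 - coverage_at_512)
        # Random-text ratio: 512 characters out of the whole alphabet,
        # e.g. 512 / (5401 - 512) ≈ 0.105.
        random = 512.0 / (alphabet_size - 512)
        return ideal, random

    print(distribution_ratios(0.74851, 5401))   # -> (~2.98, ~0.105)

(EUCTW_TYPICAL_DISTRIBUTION_RATIO above is set to 0.75, roughly a quarter of the ideal value but still far above the random-text ratio, as the deleted comments note.)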
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/euctwprober.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/euctwprober.py
deleted file mode 100644
index 35669cc..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/euctwprober.py
+++ /dev/null
@@ -1,46 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
-from .chardistribution import EUCTWDistributionAnalysis
-from .mbcssm import EUCTW_SM_MODEL
-
-class EUCTWProber(MultiByteCharSetProber):
-    def __init__(self):
-        super(EUCTWProber, self).__init__()
-        self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL)
-        self.distribution_analyzer = EUCTWDistributionAnalysis()
-        self.reset()
-
-    @property
-    def charset_name(self):
-        return "EUC-TW"
-
-    @property
-    def language(self):
-        return "Taiwan"
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/gb2312freq.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/gb2312freq.py
deleted file mode 100644
index 697837b..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/gb2312freq.py
+++ /dev/null
@@ -1,283 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# GB2312 most frequently used character table
-#
-# Char to FreqOrder table, from hz6763
-
-# 512  --> 0.79  -- 0.79
-# 1024 --> 0.92  -- 0.13
-# 2048 --> 0.98  -- 0.06
-# 6768 --> 1.00  -- 0.02
-#
-# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79
-# Random Distribution Ratio = 512 / (3755 - 512) = 0.157
-#
-# The typical distribution ratio is about 25% of the ideal one, but still much higher than RDR
-
-GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9
-
-GB2312_TABLE_SIZE = 3760
-
-GB2312_CHAR_TO_FREQ_ORDER = (
-1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205,
-2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842,
-2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409,
- 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670,
-1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820,
-1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585,
- 152,1687,1539, 738,1559,  59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566,
-1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850,  70,3285,2729,3534,3575,
-2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853,
-3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061,
- 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155,
-1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406,
- 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816,
-2534,1546,2393,2760, 737,2494,  13, 447, 245,2747,  38,2765,2129,2589,1079, 606,
- 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023,
-2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414,
-1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513,
-3195,4115,5627,2489,2991,  24,2065,2697,1087,2719,  48,1634, 315,  68, 985,2052,
- 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570,
-1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575,
- 253,3099,  32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250,
-2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506,
-1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563,  26,
-3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835,
-1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686,
-2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054,
-1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894,
- 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105,
-3777,3657, 643,2298,1148,1779, 190, 989,3544, 414,  11,2135,2063,2979,1471, 403,
-3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694,
- 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873,
-3651, 210,  33,1608,2516, 200,1520, 415, 102,   0,3389,1287, 817,  91,3299,2940,
- 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687,  20,1819, 121,
-1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648,
-3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992,
-2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680,  72, 842,1990, 212,1233,
-1154,1586,  75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157,
- 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807,
-1910, 534, 529,3309,1721,1660, 274,  39,2827, 661,2670,1578, 925,3248,3815,1094,
-4278,4901,4252,  41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258,
- 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478,
-3568, 194,5062,  15, 961,3870,1241,1192,2664,  66,5215,3260,2111,1295,1127,2152,
-3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426,  53,2909,
- 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272,
-1272,2363, 284,1753,3679,4064,1695,  81, 815,2677,2757,2731,1386, 859, 500,4221,
-2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252,
-1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301,
-1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254,
- 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070,
-3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461,
-3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640,  67,2360,
-4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124,
- 296,3979,1739,1611,3684,  23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535,
-3116,  17,1074, 467,2692,2201, 387,2922,  45,1326,3055,1645,3659,2817, 958, 243,
-1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713,
-1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071,
-4046,3572,2399,1571,3281,  79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442,
- 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946,
- 814,4968,3487,1548,2644,1567,1285,   2, 295,2636,  97, 946,3576, 832, 141,4257,
-3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180,
-1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427,
- 602,1525,2608,1605,1639,3175, 694,3064,  10, 465,  76,2000,4846,4208, 444,3781,
-1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724,
-2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844,  89, 937,
- 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943,
- 432, 445,2811, 206,4136,1472, 730, 349,  73, 397,2802,2547, 998,1637,1167, 789,
- 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552,
-3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246,
-4996, 371,1575,2436,1621,2210, 984,4033,1734,2638,  16,4529, 663,2755,3255,1451,
-3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310,
- 750,2058, 165,  80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860,
-2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297,
-2357, 395,3740, 137,2075, 944,4089,2584,1267,3802,  62,1533,2285, 178, 176, 780,
-2440, 201,3707, 590, 478,1560,4354,2117,1075,  30,  74,4643,4004,1635,1441,2745,
- 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936,
-2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032,
- 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669,  43,2523,1657,
- 163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414,
- 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976,
-3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436,
-2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254,
-2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024,  40,3240,1536,
-1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238,
-  18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059,
-2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741,
-  90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447,
- 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601,
-1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269,
-1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076,  46,4253,2873,1889,1894,
- 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173,
- 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994,
-1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956,
-2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437,
-3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154,
-2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240,
-2269,2246,1446,  36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143,
-2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634,
-3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472,
-1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906,  51, 369, 170,3541,
-1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143,
-2101,2730,2490,  82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312,
-1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414,
-3750,2289,2795, 813,3123,2610,1136,4368,   5,3391,4541,2174, 420, 429,1728, 754,
-1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424,
-1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302,
-3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739,
- 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004,
-2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484,
-1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739,
-4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535,
-1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641,
-1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307,
-3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573,
-1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533,
-  47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965,
- 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096,  99,
-1397,1769,2300,4428,1643,3455,1978,1757,3718,1440,  35,4879,3742,1296,4228,2280,
- 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505,
-1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012,
-1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039,
- 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982,
-3708, 135,2131,  87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530,
-4314,   9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392,
-3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656,
-2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220,
-2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766,
-1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535,
-3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728,
-2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338,
-1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627,
-1505,1911,1883,3526, 698,3629,3456,1833,1431, 746,  77,1261,2017,2296,1977,1885,
- 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411,
-2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671,
-2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162,
-3192,2910,2010, 140,2395,2859,  55,1082,2012,2901, 662, 419,2081,1438, 680,2774,
-4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524,
-3399,  98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346,
- 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040,
-3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188,
-2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280,
-1086,1974,2034, 630, 257,3338,2788,4903,1017,  86,4790, 966,2789,1995,1696,1131,
- 259,3095,4188,1308, 179,1463,5257, 289,4107,1248,  42,3413,1725,2288, 896,1947,
- 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970,
-3034,3310, 540,2370,1562,1288,2990, 502,4765,1147,   4,1853,2708, 207, 294,2814,
-4078,2902,2509, 684,  34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557,
-2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997,
-1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972,
-1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369,
- 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376,
-1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196,  19, 941,3624,3480,
-3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610,
- 955,1089,3103,1053,  96,  88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128,
- 642,4006, 903,2539,1877,2082, 596,  29,4066,1790, 722,2157, 130, 995,1569, 769,
-1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445,  50, 625, 487,2207,
-  57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392,
-1783, 362,   8,3433,3422, 610,2793,3277,1390,1284,1654,  21,3823, 734, 367, 623,
- 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782,
-2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650,
- 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478,
-2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773,
-2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007,
-1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323,
-1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598,
-2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961,
- 819,1541, 142,2284,  44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302,
-1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409,
-1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683,
-2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191,
-2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434,  92,1466,4920,2616,
-3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302,
-1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774,
-4462,  64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147,
- 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731,
- 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464,
-3264,2855,2722,1952,1029,2839,2467,  84,4383,2215, 820,1391,2015,2448,3672, 377,
-1948,2168, 797,2545,3536,2578,2645,  94,2874,1678, 405,1259,3071, 771, 546,1315,
- 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928,  14,2594, 557,
-3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903,
-1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060,
-4031,2641,4067,3145,1870,  37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261,
-1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092,
-2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810,
-1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708,
- 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658,
-1178,2639,2351,  93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871,
-3341,1618,4126,2595,2334, 603, 651,  69, 701, 268,2662,3411,2555,1380,1606, 503,
- 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229,
-2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112,
- 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504,
-1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389,
-1281,  52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169,  27,
-1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542,
-3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861,
-2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845,
-3891,2868,3621,2254,  58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700,
-3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469,
-3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582,
- 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999,
-2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274,
- 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020,
-2724,1927,2333,4440, 567,  22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601,
-  12, 974,3783,4391, 951,1412,   1,3720, 453,4608,4041, 528,1041,1027,3230,2628,
-1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040,  31,
- 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668,
- 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778,
-1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169,
-3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667,
-3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118,  63,2076, 314,1881,
-1348,1061, 172, 978,3515,1747, 532, 511,3970,   6, 601, 905,2699,3300,1751, 276,
-1467,3725,2668,  65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320,
-3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751,
-2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432,
-2754,  95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772,
-1985, 244,2546, 474, 495,1046,2611,1851,2061,  71,2089,1675,2590, 742,3758,2843,
-3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116,
- 451,   3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904,
-4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652,
-1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664,
-2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078,  49,3770,
-3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283,
-3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626,
-1197,1663,4476,3127,  85,4240,2528,  25,1111,1181,3673, 407,3470,4561,2679,2713,
- 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333,
- 391,2963, 187,  61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062,
-2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555,
- 931, 317,2517,3027, 325, 569, 686,2107,3084,  60,1042,1333,2794, 264,3177,4014,
-1628, 258,3712,   7,4464,1176,1043,1778, 683, 114,1975,  78,1492, 383,1886, 510,
- 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015,
-1282,1289,4609, 697,1453,3044,2666,3611,1856,2412,  54, 719,1330, 568,3778,2459,
-1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390,
-1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238,
-1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421,  56,1908,1640,2387,2232,
-1917,1874,2477,4921, 148,  83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624,
- 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189,
- 852,1221,1400,1486, 882,2299,4036, 351,  28,1122, 700,6479,6480,6481,6482,6483,  #last 512
-)
-
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/gb2312prober.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/gb2312prober.py
deleted file mode 100644
index 8446d2d..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/gb2312prober.py
+++ /dev/null
@@ -1,46 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
-from .chardistribution import GB2312DistributionAnalysis
-from .mbcssm import GB2312_SM_MODEL
-
-class GB2312Prober(MultiByteCharSetProber):
-    def __init__(self):
-        super(GB2312Prober, self).__init__()
-        self.coding_sm = CodingStateMachine(GB2312_SM_MODEL)
-        self.distribution_analyzer = GB2312DistributionAnalysis()
-        self.reset()
-
-    @property
-    def charset_name(self):
-        return "GB2312"
-
-    @property
-    def language(self):
-        return "Chinese"
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/hebrewprober.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/hebrewprober.py
deleted file mode 100644
index b0e1bf4..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/hebrewprober.py
+++ /dev/null
@@ -1,292 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-#          Shy Shalom
-# Portions created by the Initial Developer are Copyright (C) 2005
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .charsetprober import CharSetProber
-from .enums import ProbingState
-
-# This prober doesn't actually recognize a language or a charset.
-# It is a helper prober for the use of the Hebrew model probers
-
-### General ideas of the Hebrew charset recognition ###
-#
-# Four main charsets exist in Hebrew:
-# "ISO-8859-8" - Visual Hebrew
-# "windows-1255" - Logical Hebrew
-# "ISO-8859-8-I" - Logical Hebrew
-# "x-mac-hebrew" - ?? Logical Hebrew ??
-#
-# Both "ISO" charsets use a completely identical set of code points, whereas
-# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
-# these code points. windows-1255 defines additional characters in the range
-# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
-# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
-# x-mac-hebrew defines similar additional code points but with a different
-# mapping.
-#
-# As far as an average Hebrew text with no diacritics is concerned, all four
-# charsets are identical with respect to code points. Meaning that for the
-# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
-# (including final letters).
-#
-# The dominant difference between these charsets is their directionality.
-# "Visual" directionality means that the text is ordered as if the renderer is
-# not aware of a BIDI rendering algorithm. The renderer sees the text and
-# draws it from left to right. The text itself when ordered naturally is read
-# backwards. A buffer of Visual Hebrew generally looks like so:
-# "[last word of first line spelled backwards] [whole line ordered backwards
-# and spelled backwards] [first word of first line spelled backwards]
-# [end of line] [last word of second line] ... etc' "
-# adding punctuation marks, numbers and English text to visual text is
-# naturally also "visual" and from left to right.
-#
-# "Logical" directionality means the text is ordered "naturally" according to
-# the order it is read. It is the responsibility of the renderer to display
-# the text from right to left. A BIDI algorithm is used to place general
-# punctuation marks, numbers and English text in the text.
-#
-# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
-# what little evidence I could find, it seems that its general directionality
-# is Logical.
-#
-# To sum up all of the above, the Hebrew probing mechanism knows about two
-# charsets:
-# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
-#    backwards while line order is natural. For charset recognition purposes
-#    the line order is unimportant (In fact, for this implementation, even
-#    word order is unimportant).
-# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
-#
-# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
-#    specifically identified.
-# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
-#    that contains special punctuation marks or diacritics is displayed with
-#    some unconverted characters showing as question marks. This problem might
-#    be corrected using another model prober for x-mac-hebrew. Due to the fact
-#    that x-mac-hebrew texts are so rare, writing another model prober isn't
-#    worth the effort and performance hit.
-#
-#### The Prober ####
-#
-# The prober is divided between two SBCharSetProbers and a HebrewProber,
-# all of which are managed, created, fed data, inquired and deleted by the
-# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
-# fact some kind of Hebrew, Logical or Visual. The HebrewProber makes the
-# final decision about which one it is by combining the final-letter scores
-# with the scores of the two SBCharSetProbers to produce a final answer.
-#
-# The SBCSGroupProber is responsible for stripping the original text of HTML
-# tags, English characters, numbers, low-ASCII punctuation characters, spaces
-# and new lines. It reduces any sequence of such characters to a single space.
-# The buffer fed to each prober in the SBCS group prober is pure text in
-# high-ASCII.
-# The two SBCharSetProbers (model probers) share the same language model:
-# Win1255Model.
-# The first SBCharSetProber uses the model normally as any other
-# SBCharSetProber does, to recognize windows-1255, upon which this model was
-# built. The second SBCharSetProber is told to make the pair-of-letter
-# lookup in the language model backwards. This in practice exactly simulates
-# a visual Hebrew model using the windows-1255 logical Hebrew model.
-#
-# The HebrewProber is not using any language model. All it does is look for
-# final-letter evidence suggesting the text is either logical Hebrew or visual
-# Hebrew. Disjointed from the model probers, the results of the HebrewProber
-# alone are meaningless. HebrewProber always returns 0.00 as confidence
-# since it never identifies a charset by itself. Instead, the pointer to the
-# HebrewProber is passed to the model probers as a helper "Name Prober".
-# When the Group prober receives a positive identification from any prober,
-# it asks for the name of the charset identified. If the prober queried is a
-# Hebrew model prober, the model prober forwards the call to the
-# HebrewProber to make the final decision. In the HebrewProber, the
-# decision is made according to the final-letter scores maintained and both
-# model probers' scores. The answer is returned in the form of the name of the
-# charset identified, either "windows-1255" or "ISO-8859-8".
-
-class HebrewProber(CharSetProber):
-    # windows-1255 / ISO-8859-8 code points of interest
-    FINAL_KAF = 0xea
-    NORMAL_KAF = 0xeb
-    FINAL_MEM = 0xed
-    NORMAL_MEM = 0xee
-    FINAL_NUN = 0xef
-    NORMAL_NUN = 0xf0
-    FINAL_PE = 0xf3
-    NORMAL_PE = 0xf4
-    FINAL_TSADI = 0xf5
-    NORMAL_TSADI = 0xf6
-
-    # Minimum Visual vs Logical final letter score difference.
-    # If the difference is below this, don't rely solely on the final letter score
-    # distance.
-    MIN_FINAL_CHAR_DISTANCE = 5
-
-    # Minimum Visual vs Logical model score difference.
-    # If the difference is below this, don't rely at all on the model score
-    # distance.
-    MIN_MODEL_DISTANCE = 0.01
-
-    VISUAL_HEBREW_NAME = "ISO-8859-8"
-    LOGICAL_HEBREW_NAME = "windows-1255"
-
-    def __init__(self):
-        super(HebrewProber, self).__init__()
-        self._final_char_logical_score = None
-        self._final_char_visual_score = None
-        self._prev = None
-        self._before_prev = None
-        self._logical_prober = None
-        self._visual_prober = None
-        self.reset()
-
-    def reset(self):
-        self._final_char_logical_score = 0
-        self._final_char_visual_score = 0
-        # The two last characters seen in the previous buffer,
-        # mPrev and mBeforePrev are initialized to space in order to simulate
-        # a word delimiter at the beginning of the data
-        self._prev = ' '
-        self._before_prev = ' '
-        # These probers are owned by the group prober.
-
-    def set_model_probers(self, logicalProber, visualProber):
-        self._logical_prober = logicalProber
-        self._visual_prober = visualProber
-
-    def is_final(self, c):
-        return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN,
-                     self.FINAL_PE, self.FINAL_TSADI]
-
-    def is_non_final(self, c):
-        # The normal Tsadi is not a good Non-Final letter due to words like
-        # 'lechotet' (to chat) containing an apostrophe after the tsadi. This
-        # apostrophe is converted to a space in FilterWithoutEnglishLetters
-        # causing the Non-Final tsadi to appear at an end of a word even
-        # though this is not the case in the original text.
-        # The letters Pe and Kaf rarely display a related behavior of not being
-        # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak'
-        # for example legally end with a Non-Final Pe or Kaf. However, the
-        # benefit of these letters as Non-Final letters outweighs the damage
-        # since these words are quite rare.
-        return c in [self.NORMAL_KAF, self.NORMAL_MEM,
-                     self.NORMAL_NUN, self.NORMAL_PE]
-
-    def feed(self, byte_str):
-        # Final letter analysis for logical-visual decision.
-        # Look for evidence that the received buffer is either logical Hebrew
-        # or visual Hebrew.
-        # The following cases are checked:
-        # 1) A word longer than 1 letter, ending with a final letter. This is
-        #    an indication that the text is laid out "naturally" since the
-        #    final letter really appears at the end. +1 for logical score.
-        # 2) A word longer than 1 letter, ending with a Non-Final letter. In
-        #    normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi,
-        #    should not end with the Non-Final form of that letter. Exceptions
-        #    to this rule are mentioned above in isNonFinal(). This is an
-        #    indication that the text is laid out backwards. +1 for visual
-        #    score
-        # 3) A word longer than 1 letter, starting with a final letter. Final
-        #    letters should not appear at the beginning of a word. This is an
-        #    indication that the text is laid out backwards. +1 for visual
-        #    score.
-        #
-        # The visual score and logical score are accumulated throughout the
-        # text and are finally checked against each other in GetCharSetName().
-        # No checking for final letters in the middle of words is done since
-        # that case is not an indication for either Logical or Visual text.
-        #
-        # We automatically filter out all 7-bit characters (replace them with
-        # spaces) so the word boundary detection works properly. [MAP]
-
-        if self.state == ProbingState.NOT_ME:
-            # Both model probers say it's not them. No reason to continue.
-            return ProbingState.NOT_ME
-
-        byte_str = self.filter_high_byte_only(byte_str)
-
-        for cur in byte_str:
-            if cur == ' ':
-                # We stand on a space - a word just ended
-                if self._before_prev != ' ':
-                    # next-to-last char was not a space so self._prev is not a
-                    # 1 letter word
-                    if self.is_final(self._prev):
-                        # case (1) [-2:not space][-1:final letter][cur:space]
-                        self._final_char_logical_score += 1
-                    elif self.is_non_final(self._prev):
-                        # case (2) [-2:not space][-1:Non-Final letter][
-                        #  cur:space]
-                        self._final_char_visual_score += 1
-            else:
-                # Not standing on a space
-                if ((self._before_prev == ' ') and
-                        (self.is_final(self._prev)) and (cur != ' ')):
-                    # case (3) [-2:space][-1:final letter][cur:not space]
-                    self._final_char_visual_score += 1
-            self._before_prev = self._prev
-            self._prev = cur
-
-        # Forever detecting, till the end or until both model probers return
-        # ProbingState.NOT_ME (handled above)
-        return ProbingState.DETECTING
-
-    @property
-    def charset_name(self):
-        # Make the decision: is it Logical or Visual?
-        # If the final letter score distance is dominant enough, rely on it.
-        finalsub = self._final_char_logical_score - self._final_char_visual_score
-        if finalsub >= self.MIN_FINAL_CHAR_DISTANCE:
-            return self.LOGICAL_HEBREW_NAME
-        if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE:
-            return self.VISUAL_HEBREW_NAME
-
-        # It's not dominant enough, try to rely on the model scores instead.
-        modelsub = (self._logical_prober.get_confidence()
-                    - self._visual_prober.get_confidence())
-        if modelsub > self.MIN_MODEL_DISTANCE:
-            return self.LOGICAL_HEBREW_NAME
-        if modelsub < -self.MIN_MODEL_DISTANCE:
-            return self.VISUAL_HEBREW_NAME
-
-        # Still no good, back to final letter distance, maybe it'll save the
-        # day.
-        if finalsub < 0.0:
-            return self.VISUAL_HEBREW_NAME
-
-        # (finalsub > 0 - Logical) or (don't know what to do) default to
-        # Logical.
-        return self.LOGICAL_HEBREW_NAME
-
-    @property
-    def language(self):
-        return 'Hebrew'
-
-    @property
-    def state(self):
-        # Remain active as long as any of the model probers are active.
-        if (self._logical_prober.state == ProbingState.NOT_ME) and \
-           (self._visual_prober.state == ProbingState.NOT_ME):
-            return ProbingState.NOT_ME
-        return ProbingState.DETECTING
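The long comment block in the deleted hebrewprober.py describes the logical-vs-visual decision that charset_name implements: final-letter evidence first, model-prober confidence second, logical Hebrew as the default. As a quick reference, that rule can be restated as a small standalone function; this is a sketch mirroring the deleted code, not something used by this repository:

# Thresholds as defined in the deleted HebrewProber
MIN_FINAL_CHAR_DISTANCE = 5     # minimum decisive final-letter score gap
MIN_MODEL_DISTANCE = 0.01       # minimum decisive model-confidence gap

def decide_hebrew_charset(final_logical, final_visual,
                          logical_confidence, visual_confidence):
    """Return 'windows-1255' (logical Hebrew) or 'ISO-8859-8' (visual Hebrew)."""
    finalsub = final_logical - final_visual
    if finalsub >= MIN_FINAL_CHAR_DISTANCE:
        return "windows-1255"
    if finalsub <= -MIN_FINAL_CHAR_DISTANCE:
        return "ISO-8859-8"
    # Final-letter evidence is not decisive; fall back to the model probers.
    modelsub = logical_confidence - visual_confidence
    if modelsub > MIN_MODEL_DISTANCE:
        return "windows-1255"
    if modelsub < -MIN_MODEL_DISTANCE:
        return "ISO-8859-8"
    # Still ambiguous: a negative final-letter margin means visual,
    # otherwise default to logical Hebrew.
    return "ISO-8859-8" if finalsub < 0 else "windows-1255"

Defaulting to windows-1255 at the end mirrors the deleted implementation's bias toward logical Hebrew when neither score gap is decisive.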
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/jisfreq.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/jisfreq.py
deleted file mode 100644
index 83fc082..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/jisfreq.py
+++ /dev/null
@@ -1,325 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# Sampling from about 20M text materials including literature and computer technology
-#
-# Japanese frequency table, applied to both S-JIS and EUC-JP
-# They are sorted in frequency order.
-
-# 128  --> 0.77094
-# 256  --> 0.85710
-# 512  --> 0.92635
-# 1024 --> 0.97130
-# 2048 --> 0.99431
-#
-# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58
-# Random Distribution Ratio = 512 / (2965+62+83+86-512) = 0.191
-#
-# Typical Distribution Ratio, 25% of IDR
-
-JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0
-
-# Char to FreqOrder table
-JIS_TABLE_SIZE = 4368
-
-JIS_CHAR_TO_FREQ_ORDER = (
-  40,   1,   6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, #   16
-3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247,  18, 179,5071, 856,1661, #   32
-1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, #   48
-2042,1061,1062,  48,  49,  44,  45, 433, 434,1040,1041, 996, 787,2997,1255,4305, #   64
-2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, #   80
-5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, #   96
-1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, #  112
-5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, #  128
-5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, #  144
-5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, #  160
-5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, #  176
-5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, #  192
-5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, #  208
-1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, #  224
-1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, #  240
-1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, #  256
-2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, #  272
-3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161,  26,3377,   2,3929,  20, #  288
-3691,  47,4100,  50,  17,  16,  35, 268,  27, 243,  42, 155,  24, 154,  29, 184, #  304
-   4,  91,  14,  92,  53, 396,  33, 289,   9,  37,  64, 620,  21,  39, 321,   5, #  320
-  12,  11,  52,  13,   3, 208, 138,   0,   7,  60, 526, 141, 151,1069, 181, 275, #  336
-1591,  83, 132,1475, 126, 331, 829,  15,  69, 160,  59,  22, 157,  55,1079, 312, #  352
- 109,  38,  23,  25,  10,  19,  79,5195,  61, 382,1124,   8,  30,5196,5197,5198, #  368
-5199,5200,5201,5202,5203,5204,5205,5206,  89,  62,  74,  34,2416, 112, 139, 196, #  384
- 271, 149,  84, 607, 131, 765,  46,  88, 153, 683,  76, 874, 101, 258,  57,  80, #  400
-  32, 364, 121,1508, 169,1547,  68, 235, 145,2999,  41, 360,3027,  70,  63,  31, #  416
-  43, 259, 262,1383,  99, 533, 194,  66,  93, 846, 217, 192,  56, 106,  58, 565, #  432
- 280, 272, 311, 256, 146,  82, 308,  71, 100, 128, 214, 655, 110, 261, 104,1140, #  448
-  54,  51,  36,  87,  67,3070, 185,2618,2936,2020,  28,1066,2390,2059,5207,5208, #  464
-5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, #  480
-5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, #  496
-5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, #  512
-4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, #  528
-5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, #  544
-5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, #  560
-5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, #  576
-5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, #  592
-5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, #  608
-5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, #  624
-5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, #  640
-5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, #  656
-5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, #  672
-3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, #  688
-5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, #  704
-5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, #  720
-5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, #  736
-5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, #  752
-5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, #  768
-5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, #  784
-5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, #  800
-5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, #  816
-5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, #  832
-5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, #  848
-5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, #  864
-5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, #  880
-5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, #  896
-5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, #  912
-5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, #  928
-5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, #  944
-5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, #  960
-5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, #  976
-5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, #  992
-5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008
-5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024
-5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040
-5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056
-5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072
-5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088
-5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104
-5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120
-5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136
-5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152
-5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168
-5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184
-5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200
-5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216
-5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232
-5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248
-5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264
-5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280
-5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296
-6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312
-6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328
-6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344
-6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360
-6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376
-6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392
-6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408
-6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424
-4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440
- 854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456
- 665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472
-1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619,  65,3302,2045, # 1488
-1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504
- 896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520
-3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536
-3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552
- 804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568
-3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584
-3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600
- 586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616
-2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632
- 277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648
-3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664
-1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680
- 380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696
-1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712
- 850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728
-2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744
-2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760
-2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776
-2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792
-1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808
-1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824
-1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840
-1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856
-2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872
-1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888
-2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904
-1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920
-1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936
-1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952
-1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968
-1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984
-1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000
- 606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016
- 684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032
-1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048
-2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064
-2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080
-2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096
-3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112
-3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128
- 884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144
-3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160
-1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876,  78,2287,1482,1277, # 2176
- 861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192
-2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208
-1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224
- 576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240
-3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256
-4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272
-2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288
-1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304
-2601,1919,1078,  75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320
-1075, 292,3818,1756,2602, 317,  98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336
- 385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352
- 178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368
-1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384
-2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400
-2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416
-2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432
-3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448
-1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464
-2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480
- 359,2291,1676,  73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496
- 837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512
- 855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528
-1209,  96, 587,2166,1032, 260,1072,2153, 173,  94, 226,3244, 819,2006,4642,4114, # 2544
-2203, 231,1744, 782,  97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560
- 633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576
-1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592
-1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608
- 353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624
-1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640
-1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656
-1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672
- 764,2861,1853, 688,2429,1920,1462,  77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688
-2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704
- 278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720
-2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736
-3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752
-2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768
-1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784
-6147, 441, 762,1771,3447,3607,3608,1904, 840,3037,  86, 939,1385, 572,1370,2445, # 2800
-1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816
-2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832
-1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848
- 470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864
-  72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880
-3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896
-3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912
-1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928
-1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944
-1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960
-1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976
- 123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992
- 913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008
-2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024
- 900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040
-3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056
-2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072
- 423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088
-1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104
-2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120
- 220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136
-1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152
- 745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168
-4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184
-2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200
-1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216
- 666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232
-1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248
-2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264
- 376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280
-6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296
-1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312
-1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328
-2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344
-3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360
- 914,2550,2587,  81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376
-3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392
-1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408
- 674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424
-1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440
- 199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456
-3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472
- 370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488
-2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504
- 414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520
-4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536
-2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552
-1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568
-1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584
-1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600
- 166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616
-1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632
-3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648
-1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664
-3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680
- 264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696
- 543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712
- 983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728
-2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744
-1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760
- 867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776
-1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792
- 894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808
-1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824
- 530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840
- 839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856
- 480,2083,1774,3458, 923,2279,1350, 221,3086,  85,2233,2234,3835,1585,3010,2147, # 3872
-1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888
-1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904
-2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920
-4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936
- 227,1351,1645,2453,2193,1421,2887, 812,2121, 634,  95,2435, 201,2312,4665,1646, # 3952
-1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968
- 328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984
-1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000
-3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016
-1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032
-2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048
-2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064
-1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080
-1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096
-2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112
- 455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128
-2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144
-1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160
-1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176
-1279,2136,1697,2335, 204, 721,2097,3838,  90,6186,2085,2505, 191,3967, 124,2148, # 4192
-1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208
-3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224
-2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240
-2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256
- 575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272
-3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288
-3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304
-1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320
-2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336
-1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352
-2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368  #last 512
-)
-
-
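The header of the deleted jisfreq.py packs its reasoning into a few constants, and the arithmetic behind them is easy to reproduce. A small sketch of that calculation, using only the figures quoted in the deleted comments:

# The 512 most frequent characters cover ~92.635% of typical Japanese text.
coverage_512 = 0.92635
ideal_ratio = coverage_512 / (1 - coverage_512)    # ≈ 12.58, the "Ideal Distribution Ratio"

# A random distribution over the table's character space (the figures quoted
# in the deleted comment) concentrates far less mass in those 512 slots.
random_ratio = 512 / (2965 + 62 + 83 + 86 - 512)   # ≈ 0.191

# The runtime constant is roughly a quarter of the ideal ratio, shipped as 3.0
# (JIS_TYPICAL_DISTRIBUTION_RATIO), comfortably above the random baseline.
typical_ratio = 0.25 * ideal_ratio                 # ≈ 3.15

print(round(ideal_ratio, 2), round(random_ratio, 3), round(typical_ratio, 2))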
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/jpcntx.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/jpcntx.py
deleted file mode 100644
index 20044e4..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/jpcntx.py
+++ /dev/null
@@ -1,233 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-
-# This is the hiragana 2-char sequence table; the number in each cell represents its frequency category
-jp2CharContext = (
-(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1),
-(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4),
-(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2),
-(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4),
-(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
-(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4),
-(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
-(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3),
-(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
-(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4),
-(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4),
-(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3),
-(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3),
-(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3),
-(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4),
-(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3),
-(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4),
-(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3),
-(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5),
-(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3),
-(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5),
-(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4),
-(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4),
-(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3),
-(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3),
-(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3),
-(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5),
-(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4),
-(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5),
-(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3),
-(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4),
-(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4),
-(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4),
-(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1),
-(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0),
-(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3),
-(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0),
-(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3),
-(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3),
-(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5),
-(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4),
-(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5),
-(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3),
-(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3),
-(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3),
-(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3),
-(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4),
-(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4),
-(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2),
-(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3),
-(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3),
-(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3),
-(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3),
-(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4),
-(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3),
-(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4),
-(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3),
-(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3),
-(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4),
-(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4),
-(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3),
-(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4),
-(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4),
-(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3),
-(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4),
-(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4),
-(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4),
-(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3),
-(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2),
-(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2),
-(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3),
-(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3),
-(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5),
-(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3),
-(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4),
-(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4),
-(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4),
-(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
-(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
-(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
-(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
-(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
-(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
-)
-
-class JapaneseContextAnalysis(object):
-    NUM_OF_CATEGORY = 6
-    DONT_KNOW = -1
-    ENOUGH_REL_THRESHOLD = 100
-    MAX_REL_THRESHOLD = 1000
-    MINIMUM_DATA_THRESHOLD = 4
-
-    def __init__(self):
-        self._total_rel = None
-        self._rel_sample = None
-        self._need_to_skip_char_num = None
-        self._last_char_order = None
-        self._done = None
-        self.reset()
-
-    def reset(self):
-        self._total_rel = 0  # total number of sequences received
-        # category counters; each integer counts the sequences in its category
-        self._rel_sample = [0] * self.NUM_OF_CATEGORY
-        # if the last byte in the current buffer is not the last byte of a
-        # character, we need to know how many bytes to skip in the next buffer
-        self._need_to_skip_char_num = 0
-        self._last_char_order = -1  # the order of the previous char
-        # if this flag is set to True, detection is done and a conclusion has
-        # been made
-        self._done = False
-
-    def feed(self, byte_str, num_bytes):
-        if self._done:
-            return
-
-        # The buffer we receive is byte oriented, and a character may span
-        # more than one buffer. If the last one or two bytes of the previous
-        # buffer did not form a complete character, we record how many bytes
-        # are needed to complete it and skip them here.  We could instead keep
-        # those bytes and analyse the character once it is complete, but a
-        # single character makes little difference, so simply skipping it
-        # keeps the logic simple and improves performance.
-        i = self._need_to_skip_char_num
-        while i < num_bytes:
-            order, char_len = self.get_order(byte_str[i:i + 2])
-            i += char_len
-            if i > num_bytes:
-                self._need_to_skip_char_num = i - num_bytes
-                self._last_char_order = -1
-            else:
-                if (order != -1) and (self._last_char_order != -1):
-                    self._total_rel += 1
-                    if self._total_rel > self.MAX_REL_THRESHOLD:
-                        self._done = True
-                        break
-                    self._rel_sample[jp2CharContext[self._last_char_order][order]] += 1
-                self._last_char_order = order
-
-    def got_enough_data(self):
-        return self._total_rel > self.ENOUGH_REL_THRESHOLD
-
-    def get_confidence(self):
-        # This is just one way to calculate confidence. It works well for me.
-        if self._total_rel > self.MINIMUM_DATA_THRESHOLD:
-            return (self._total_rel - self._rel_sample[0]) / self._total_rel
-        else:
-            return self.DONT_KNOW
-
-    def get_order(self, byte_str):
-        return -1, 1
-
-class SJISContextAnalysis(JapaneseContextAnalysis):
-    def __init__(self):
-        super(SJISContextAnalysis, self).__init__()
-        self._charset_name = "SHIFT_JIS"
-
-    @property
-    def charset_name(self):
-        return self._charset_name
-
-    def get_order(self, byte_str):
-        if not byte_str:
-            return -1, 1
-        # find out current char's byte length
-        first_char = byte_str[0]
-        if (0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC):
-            char_len = 2
-            if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
-                self._charset_name = "CP932"
-        else:
-            char_len = 1
-
-        # return its order if it is hiragana
-        if len(byte_str) > 1:
-            second_char = byte_str[1]
-            if (first_char == 202) and (0x9F <= second_char <= 0xF1):
-                return second_char - 0x9F, char_len
-
-        return -1, char_len
-
-class EUCJPContextAnalysis(JapaneseContextAnalysis):
-    def get_order(self, byte_str):
-        if not byte_str:
-            return -1, 1
-        # find out current char's byte length
-        first_char = byte_str[0]
-        if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE):
-            char_len = 2
-        elif first_char == 0x8F:
-            char_len = 3
-        else:
-            char_len = 1
-
-        # return its order if it is hiragana
-        if len(byte_str) > 1:
-            second_char = byte_str[1]
-            if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3):
-                return second_char - 0xA1, char_len
-
-        return -1, char_len
-
-
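The deleted jpcntx.py module above exposes a small feed/confidence API on its context analysers. The sketch below is not part of the deleted file; it only illustrates how these classes are typically driven. The top-level `chardet.jpcntx` import path and the sample text are assumptions, since this copy is actually vendored as `pip._vendor.chardet.jpcntx`.

    # Hedged usage sketch -- illustration only, not code from this diff.
    from chardet.jpcntx import EUCJPContextAnalysis  # assumed import path

    analyzer = EUCJPContextAnalysis()
    data = "ひらがなの ぶんしょうを かいせきします".encode("euc_jp")  # made-up sample
    analyzer.feed(data, len(data))        # accumulate hiragana 2-gram category counts
    if analyzer.got_enough_data():        # True once more than 100 pairs have been seen
        # share of pairs that fall outside category 0 (the "unrelated" bucket)
        print(analyzer.get_confidence())
    else:
        print("not enough data yet")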
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langbulgarianmodel.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langbulgarianmodel.py
deleted file mode 100644
index 2aa4fb2..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langbulgarianmodel.py
+++ /dev/null
@@ -1,228 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# 255: Control characters that usually do not appear in any text
-# 254: carriage return / line feed
-# 253: symbols (punctuation) that do not belong to a word
-# 252: 0 - 9
-
-# Character Mapping Table:
-# this table is modified based on win1251BulgarianCharToOrderMap, so
-# only the entries below 64 are guaranteed to be valid
-
-Latin5_BulgarianCharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82,  # 40
-110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253,  # 50
-253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71,  # 60
-116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253,  # 70
-194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,  # 80
-210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,  # 90
- 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238,  # a0
- 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30,  # b0
- 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56,  # c0
-  1, 18,  9, 20, 11,  3, 23, 15,  2, 26, 12, 10, 14,  6,  4, 13,  # d0
-  7,  8,  5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16,  # e0
- 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253,  # f0
-)
-
-win1251BulgarianCharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82,  # 40
-110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253,  # 50
-253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71,  # 60
-116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253,  # 70
-206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220,  # 80
-221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229,  # 90
- 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240,  # a0
- 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250,  # b0
- 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30,  # c0
- 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56,  # d0
-  1, 18,  9, 20, 11,  3, 23, 15,  2, 26, 12, 10, 14,  6,  4, 13,  # e0
-  7,  8,  5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16,  # f0
-)
-
-# Model Table:
-# total sequences: 100%
-# first 512 sequences: 96.9392%
-# first 1024 sequences: 3.0618%
-# rest  sequences:     0.2992%
-# negative sequences:  0.0020%
-BulgarianLangModel = (
-0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2,
-3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1,
-0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0,
-0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0,
-0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0,
-1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0,
-0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0,
-0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3,
-2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1,
-3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
-3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2,
-1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0,
-3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1,
-1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0,
-2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2,
-2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0,
-3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2,
-1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,
-2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2,
-2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
-3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2,
-1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0,
-2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2,
-2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0,
-2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2,
-1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0,
-2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2,
-1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,
-3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2,
-1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0,
-3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1,
-1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0,
-2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1,
-1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0,
-2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2,
-1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,
-2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1,
-1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0,
-3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
-1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2,
-1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1,
-2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2,
-1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
-2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2,
-1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
-1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1,
-0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
-1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2,
-1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
-2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1,
-1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,
-1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1,
-0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
-1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1,
-0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
-0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
-2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,
-1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
-0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
-0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
-1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1,
-1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
-1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-)
-
-Latin5BulgarianModel = {
-  'char_to_order_map': Latin5_BulgarianCharToOrderMap,
-  'precedence_matrix': BulgarianLangModel,
-  'typical_positive_ratio': 0.969392,
-  'keep_english_letter': False,
-  'charset_name': "ISO-8859-5",
-  'language': 'Bulgarian',
-}
-
-Win1251BulgarianModel = {
-  'char_to_order_map': win1251BulgarianCharToOrderMap,
-  'precedence_matrix': BulgarianLangModel,
-  'typical_positive_ratio': 0.969392,
-  'keep_english_letter': False,
-  'charset_name': "windows-1251",
-  'language': 'Bulgarian',
-}
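These model dicts are what chardet's single-byte prober consumes: `char_to_order_map` turns each byte into a frequency rank, and `precedence_matrix` is a flattened 64 x 64 table of 2-gram likelihood categories for the 64 most frequent letters. The sketch below is a rough simplification of that scoring idea for illustration only (the real prober also folds in `typical_positive_ratio` and other bookkeeping); it is not the deleted code itself.

    # Simplified illustration only -- not the deleted prober implementation.
    def rough_positive_ratio(data: bytes, model) -> float:
        order_map = model['char_to_order_map']
        matrix = model['precedence_matrix']      # flattened 64 x 64 table
        last_order, positive, total = 255, 0, 0
        for byte in data:
            order = order_map[byte]
            if order < 64 and last_order < 64:   # both bytes rank among the frequent letters
                total += 1
                if matrix[last_order * 64 + order] == 3:  # 3 = "positive" (likely) pair
                    positive += 1
            last_order = order
        return positive / total if total else 0.0

    # e.g. rough_positive_ratio(sample_bytes, Win1251BulgarianModel)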
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langcyrillicmodel.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langcyrillicmodel.py
deleted file mode 100644
index e5f9a1f..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langcyrillicmodel.py
+++ /dev/null
@@ -1,333 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# KOI8-R language model
-# Character Mapping Table:
-KOI8R_char_to_order_map = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154,  # 40
-155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253,  # 50
-253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69,  # 60
- 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253,  # 70
-191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,  # 80
-207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,  # 90
-223,224,225, 68,226,227,228,229,230,231,232,233,234,235,236,237,  # a0
-238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,  # b0
- 27,  3, 21, 28, 13,  2, 39, 19, 26,  4, 23, 11,  8, 12,  5,  1,  # c0
- 15, 16,  9,  7,  6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54,  # d0
- 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34,  # e0
- 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70,  # f0
-)
-
-win1251_char_to_order_map = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154,  # 40
-155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253,  # 50
-253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69,  # 60
- 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253,  # 70
-191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
-207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
-223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
-239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253,
- 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
- 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
-  3, 21, 10, 19, 13,  2, 24, 20,  4, 23, 11,  8, 12,  5,  1, 15,
-  9,  7,  6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
-)
-
-latin5_char_to_order_map = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154,  # 40
-155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253,  # 50
-253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69,  # 60
- 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253,  # 70
-191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
-207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
-223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
- 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
- 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
-  3, 21, 10, 19, 13,  2, 24, 20,  4, 23, 11,  8, 12,  5,  1, 15,
-  9,  7,  6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
-239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
-)
-
-macCyrillic_char_to_order_map = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154,  # 40
-155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253,  # 50
-253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69,  # 60
- 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253,  # 70
- 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
- 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
-191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
-207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
-223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
-239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16,
-  3, 21, 10, 19, 13,  2, 24, 20,  4, 23, 11,  8, 12,  5,  1, 15,
-  9,  7,  6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255,
-)
-
-IBM855_char_to_order_map = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154,  # 40
-155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253,  # 50
-253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69,  # 60
- 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253,  # 70
-191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205,
-206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70,
-  3, 37, 21, 44, 28, 58, 13, 41,  2, 48, 39, 53, 19, 46,218,219,
-220,221,222,223,224, 26, 55,  4, 42,225,226,227,228, 23, 60,229,
-230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243,
-  8, 49, 12, 38,  5, 31,  1, 34, 15,244,245,246,247, 35, 16,248,
- 43,  9, 45,  7, 32,  6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249,
-250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255,
-)
-
-IBM866_char_to_order_map = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154,  # 40
-155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253,  # 50
-253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69,  # 60
- 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253,  # 70
- 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35,
- 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43,
-  3, 21, 10, 19, 13,  2, 24, 20,  4, 23, 11,  8, 12,  5,  1, 15,
-191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,
-207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,
-223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,
-  9,  7,  6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16,
-239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255,
-)
-
-# Model Table:
-# total sequences: 100%
-# first 512 sequences: 97.6601%
-# first 1024 sequences: 2.3389%
-# rest  sequences:      0.1237%
-# negative sequences:   0.0009%
-RussianLangModel = (
-0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2,
-3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
-0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1,
-0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
-3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0,
-0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
-2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
-3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0,
-0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1,
-1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
-2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1,
-1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0,
-2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1,
-1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0,
-3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1,
-1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0,
-2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2,
-1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1,
-1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1,
-1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
-2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1,
-1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0,
-3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2,
-1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1,
-2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1,
-1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0,
-2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0,
-0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1,
-1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0,
-1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1,
-1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0,
-3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1,
-2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1,
-3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1,
-1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,
-1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1,
-0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
-2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1,
-1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0,
-1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1,
-0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1,
-1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
-2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2,
-2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1,
-1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0,
-1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0,
-2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,
-1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,
-0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,
-2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1,
-1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1,
-1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
-0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
-0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1,
-0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1,
-0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
-1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,
-0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
-1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,
-0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
-1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1,
-0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,
-2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,
-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0,
-0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
-)
-
-Koi8rModel = {
-  'char_to_order_map': KOI8R_char_to_order_map,
-  'precedence_matrix': RussianLangModel,
-  'typical_positive_ratio': 0.976601,
-  'keep_english_letter': False,
-  'charset_name': "KOI8-R",
-  'language': 'Russian',
-}
-
-Win1251CyrillicModel = {
-  'char_to_order_map': win1251_char_to_order_map,
-  'precedence_matrix': RussianLangModel,
-  'typical_positive_ratio': 0.976601,
-  'keep_english_letter': False,
-  'charset_name': "windows-1251",
-  'language': 'Russian',
-}
-
-Latin5CyrillicModel = {
-  'char_to_order_map': latin5_char_to_order_map,
-  'precedence_matrix': RussianLangModel,
-  'typical_positive_ratio': 0.976601,
-  'keep_english_letter': False,
-  'charset_name': "ISO-8859-5",
-  'language': 'Russian',
-}
-
-MacCyrillicModel = {
-  'char_to_order_map': macCyrillic_char_to_order_map,
-  'precedence_matrix': RussianLangModel,
-  'typical_positive_ratio': 0.976601,
-  'keep_english_letter': False,
-  'charset_name': "MacCyrillic",
-  'language': 'Russian',
-}
-
-Ibm866Model = {
-  'char_to_order_map': IBM866_char_to_order_map,
-  'precedence_matrix': RussianLangModel,
-  'typical_positive_ratio': 0.976601,
-  'keep_english_letter': False,
-  'charset_name': "IBM866",
-  'language': 'Russian',
-}
-
-Ibm855Model = {
-  'char_to_order_map': IBM855_char_to_order_map,
-  'precedence_matrix': RussianLangModel,
-  'typical_positive_ratio': 0.976601,
-  'keep_english_letter': False,
-  'charset_name': "IBM855",
-  'language': 'Russian',
-}
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langgreekmodel.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langgreekmodel.py
deleted file mode 100644
index 5332221..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langgreekmodel.py
+++ /dev/null
@@ -1,225 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# 255: Control characters that usually do not appear in any text
-# 254: carriage return / line feed
-# 253: symbols (punctuation) that do not belong to a word
-# 252: 0 - 9
-
-# Character Mapping Table:
-Latin7_char_to_order_map = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85,  # 40
- 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253,  # 50
-253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55,  # 60
- 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253,  # 70
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 80
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 90
-253,233, 90,253,253,253,253,253,253,253,253,253,253, 74,253,253,  # a0
-253,253,253,253,247,248, 61, 36, 46, 71, 73,253, 54,253,108,123,  # b0
-110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39,  # c0
- 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15,  # d0
-124,  1, 29, 20, 21,  3, 32, 13, 25,  5, 11, 16, 10,  6, 30,  4,  # e0
-  9,  8, 14,  7,  2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253,  # f0
-)
-
-win1253_char_to_order_map = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253, 82,100,104, 94, 98,101,116,102,111,187,117, 92, 88,113, 85,  # 40
- 79,118,105, 83, 67,114,119, 95, 99,109,188,253,253,253,253,253,  # 50
-253, 72, 70, 80, 81, 60, 96, 93, 89, 68,120, 97, 77, 86, 69, 55,  # 60
- 78,115, 65, 66, 58, 76,106,103, 87,107,112,253,253,253,253,253,  # 70
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 80
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 90
-253,233, 61,253,253,253,253,253,253,253,253,253,253, 74,253,253,  # a0
-253,253,253,253,247,253,253, 36, 46, 71, 73,253, 54,253,108,123,  # b0
-110, 31, 51, 43, 41, 34, 91, 40, 52, 47, 44, 53, 38, 49, 59, 39,  # c0
- 35, 48,250, 37, 33, 45, 56, 50, 84, 57,120,121, 17, 18, 22, 15,  # d0
-124,  1, 29, 20, 21,  3, 32, 13, 25,  5, 11, 16, 10,  6, 30,  4,  # e0
-  9,  8, 14,  7,  2, 12, 28, 23, 42, 24, 64, 75, 19, 26, 27,253,  # f0
-)
-
-# Model Table:
-# total sequences: 100%
-# first 512 sequences: 98.2851%
-# first 1024 sequences: 1.7001%
-# rest  sequences:     0.0359%
-# negative sequences:  0.0148%
-GreekLangModel = (
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,3,2,2,3,3,3,3,3,3,3,3,1,3,3,3,0,2,2,3,3,0,3,0,3,2,0,3,3,3,0,
-3,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,3,0,3,3,0,3,2,3,3,0,3,2,3,3,3,0,0,3,0,3,0,3,3,2,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
-0,2,3,2,2,3,3,3,3,3,3,3,3,0,3,3,3,3,0,2,3,3,0,3,3,3,3,2,3,3,3,0,
-2,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,2,1,3,3,3,3,2,3,3,2,3,3,2,0,
-0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,2,3,3,0,
-2,0,1,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
-0,3,3,3,3,3,2,3,0,0,0,0,3,3,0,3,1,3,3,3,0,3,3,0,3,3,3,3,0,0,0,0,
-2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,3,0,3,0,3,3,3,3,3,0,3,2,2,2,3,0,2,3,3,3,3,3,2,3,3,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,3,3,2,2,2,3,3,3,3,0,3,1,3,3,3,3,2,3,3,3,3,3,3,3,2,2,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,3,2,0,3,0,0,0,3,3,2,3,3,3,3,3,0,0,3,2,3,0,2,3,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,0,3,3,3,3,0,0,3,3,0,2,3,0,3,0,3,3,3,0,0,3,0,3,0,2,2,3,3,0,0,
-0,0,1,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,3,2,0,3,2,3,3,3,3,0,3,3,3,3,3,0,3,3,2,3,2,3,3,2,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,2,3,2,3,3,3,3,3,3,0,2,3,2,3,2,2,2,3,2,3,3,2,3,0,2,2,2,3,0,
-2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,3,0,0,0,3,3,3,2,3,3,0,0,3,0,3,0,0,0,3,2,0,3,0,3,0,0,2,0,2,0,
-0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,0,3,3,3,3,3,3,0,3,3,0,3,0,0,0,3,3,0,3,3,3,0,0,1,2,3,0,
-3,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,3,2,0,0,3,2,2,3,3,0,3,3,3,3,3,2,1,3,0,3,2,3,3,2,1,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,3,3,0,2,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,3,0,3,2,3,0,0,3,3,3,0,
-3,0,0,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,0,3,3,3,3,3,3,0,0,3,0,3,0,0,0,3,2,0,3,2,3,0,0,3,2,3,0,
-2,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,3,1,2,2,3,3,3,3,3,3,0,2,3,0,3,0,0,0,3,3,0,3,0,2,0,0,2,3,1,0,
-2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,0,3,3,3,3,0,3,0,3,3,2,3,0,3,3,3,3,3,3,0,3,3,3,0,2,3,0,0,3,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,0,3,3,3,0,0,3,0,0,0,3,3,0,3,0,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,3,0,0,0,3,3,3,3,3,3,0,0,3,0,2,0,0,0,3,3,0,3,0,3,0,0,2,0,2,0,
-0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,3,3,0,3,0,2,0,3,2,0,3,2,3,2,3,0,0,3,2,3,2,3,3,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,3,0,0,2,3,3,3,3,3,0,0,0,3,0,2,1,0,0,3,2,2,2,0,3,0,0,2,2,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,0,3,3,3,2,0,3,0,3,0,3,3,0,2,1,2,3,3,0,0,3,0,3,0,3,3,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,2,3,3,3,0,3,3,3,3,3,3,0,2,3,0,3,0,0,0,2,1,0,2,2,3,0,0,2,2,2,0,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,3,0,0,2,3,3,3,2,3,0,0,1,3,0,2,0,0,0,0,3,0,1,0,2,0,0,1,1,1,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,3,1,0,3,0,0,0,3,2,0,3,2,3,3,3,0,0,3,0,3,2,2,2,1,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,0,3,3,3,0,0,3,0,0,0,0,2,0,2,3,3,2,2,2,2,3,0,2,0,2,2,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,3,3,3,2,0,0,0,0,0,0,2,3,0,2,0,2,3,2,0,0,3,0,3,0,3,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,3,2,3,3,2,2,3,0,2,0,3,0,0,0,2,0,0,0,0,1,2,0,2,0,2,0,
-0,2,0,2,0,2,2,0,0,1,0,2,2,2,0,2,2,2,0,2,2,2,0,0,2,0,0,1,0,0,0,0,
-0,2,0,3,3,2,0,0,0,0,0,0,1,3,0,2,0,2,2,2,0,0,2,0,3,0,0,2,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,0,2,3,2,0,2,2,0,2,0,2,2,0,2,0,2,2,2,0,0,0,0,0,0,2,3,0,0,0,2,
-0,1,2,0,0,0,0,2,2,0,0,0,2,1,0,2,2,0,0,0,0,0,0,1,0,2,0,0,0,0,0,0,
-0,0,2,1,0,2,3,2,2,3,2,3,2,0,0,3,3,3,0,0,3,2,0,0,0,1,1,0,2,0,2,2,
-0,2,0,2,0,2,2,0,0,2,0,2,2,2,0,2,2,2,2,0,0,2,0,0,0,2,0,1,0,0,0,0,
-0,3,0,3,3,2,2,0,3,0,0,0,2,2,0,2,2,2,1,2,0,0,1,2,2,0,0,3,0,0,0,2,
-0,1,2,0,0,0,1,2,0,0,0,0,0,0,0,2,2,0,1,0,0,2,0,0,0,2,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,2,3,3,2,2,0,0,0,2,0,2,3,3,0,2,0,0,0,0,0,0,2,2,2,0,2,2,0,2,0,2,
-0,2,2,0,0,2,2,2,2,1,0,0,2,2,0,2,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,
-0,2,0,3,2,3,0,0,0,3,0,0,2,2,0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,0,2,
-0,0,2,2,0,0,2,2,2,0,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,2,0,0,3,2,0,2,2,2,2,2,0,0,0,2,0,0,0,0,2,0,1,0,0,2,0,1,0,0,0,
-0,2,2,2,0,2,2,0,1,2,0,2,2,2,0,2,2,2,2,1,2,2,0,0,2,0,0,0,0,0,0,0,
-0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
-0,2,0,2,0,2,2,0,0,0,0,1,2,1,0,0,2,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,3,2,3,0,0,2,0,0,0,2,2,0,2,0,0,0,1,0,0,2,0,2,0,2,2,0,0,0,0,
-0,0,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,
-0,2,2,3,2,2,0,0,0,0,0,0,1,3,0,2,0,2,2,0,0,0,1,0,2,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,2,0,2,0,3,2,0,2,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-0,0,2,0,0,0,0,1,1,0,0,2,1,2,0,2,2,0,1,0,0,1,0,0,0,2,0,0,0,0,0,0,
-0,3,0,2,2,2,0,0,2,0,0,0,2,0,0,0,2,3,0,2,0,0,0,0,0,0,2,2,0,0,0,2,
-0,1,2,0,0,0,1,2,2,1,0,0,0,2,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,2,1,2,0,2,2,0,2,0,0,2,0,0,0,0,1,2,1,0,2,1,0,0,0,0,0,0,0,0,0,0,
-0,0,2,0,0,0,3,1,2,2,0,2,0,0,0,0,2,0,0,0,2,0,0,3,0,0,0,0,2,2,2,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,2,1,0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,2,
-0,2,2,0,0,2,2,2,2,2,0,1,2,0,0,0,2,2,0,1,0,2,0,0,2,2,0,0,0,0,0,0,
-0,0,0,0,1,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,2,
-0,1,2,0,0,0,0,2,2,1,0,1,0,1,0,2,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,
-0,2,0,1,2,0,0,0,0,0,0,0,0,0,0,2,0,0,2,2,0,0,0,0,1,0,0,0,0,0,0,2,
-0,2,2,0,0,0,0,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,
-0,2,2,2,2,0,0,0,3,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,1,
-0,0,2,0,0,0,0,1,2,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,0,1,0,0,0,0,0,0,
-0,2,0,2,2,2,0,0,2,0,0,0,0,0,0,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,2,
-0,0,1,0,0,0,0,2,1,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
-0,3,0,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,2,
-0,0,2,0,0,0,0,2,2,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,2,0,2,2,1,0,0,0,0,0,0,2,0,0,2,0,2,2,2,0,0,0,0,0,0,2,0,0,0,0,2,
-0,0,2,0,0,2,0,2,2,0,0,0,0,2,0,2,0,0,0,0,0,2,0,0,0,2,0,0,0,0,0,0,
-0,0,3,0,0,0,2,2,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,2,0,0,0,0,0,
-0,2,2,2,2,2,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,
-0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,2,2,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
-0,2,0,0,0,2,0,0,0,0,0,1,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,2,0,0,0,
-0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,2,0,2,0,0,0,
-0,0,0,0,0,0,0,0,2,1,0,0,0,0,0,0,2,0,0,0,1,2,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-)
-
-Latin7GreekModel = {
-  'char_to_order_map': Latin7_char_to_order_map,
-  'precedence_matrix': GreekLangModel,
-  'typical_positive_ratio': 0.982851,
-  'keep_english_letter': False,
-  'charset_name': "ISO-8859-7",
-  'language': 'Greek',
-}
-
-Win1253GreekModel = {
-  'char_to_order_map': win1253_char_to_order_map,
-  'precedence_matrix': GreekLangModel,
-  'typical_positive_ratio': 0.982851,
-  'keep_english_letter': False,
-  'charset_name': "windows-1253",
-  'language': 'Greek',
-}
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langhebrewmodel.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langhebrewmodel.py
deleted file mode 100644
index 58f4c87..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langhebrewmodel.py
+++ /dev/null
@@ -1,200 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-#          Simon Montagu
-# Portions created by the Initial Developer are Copyright (C) 2005
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#   Shy Shalom - original C code
-#   Shoshannah Forbes - original C code (?)
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# 255: Control characters that usually does not exist in any text
-# 254: Carriage/Return
-# 253: symbol (punctuation) that does not belong to word
-# 252: 0 - 9
-
-# Windows-1255 language model
-# Character Mapping Table:
-WIN1255_CHAR_TO_ORDER_MAP = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85,  # 40
- 78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253,  # 50
-253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49,  # 60
- 66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253,  # 70
-124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214,
-215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221,
- 34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227,
-106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234,
- 30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237,
-238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250,
-  9,  8, 20, 16,  3,  2, 24, 14, 22,  1, 25, 15,  4, 11,  6, 23,
- 12, 19, 13, 26, 18, 27, 21, 17,  7, 10,  5,251,252,128, 96,253,
-)
-
-# Model Table:
-# total sequences: 100%
-# first 512 sequences: 98.4004%
-# first 1024 sequences: 1.5981%
-# rest  sequences:      0.087%
-# negative sequences:   0.0015%
-HEBREW_LANG_MODEL = (
-0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0,
-3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,
-1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,
-1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3,
-1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2,
-1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2,
-1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2,
-0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2,
-0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2,
-1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,
-3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2,
-0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1,
-0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0,
-0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,
-0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2,
-0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,
-3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2,
-0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2,
-0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2,
-0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2,
-0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1,
-0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2,
-0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0,
-3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2,
-0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2,
-0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2,
-0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,
-1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2,
-0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
-3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0,
-0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,
-0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
-3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3,
-0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0,
-0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0,
-0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
-0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0,
-0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
-0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0,
-2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0,
-0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1,
-0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1,
-0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,
-0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0,
-0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1,
-1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1,
-0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1,
-2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1,
-1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1,
-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1,
-2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1,
-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1,
-1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1,
-2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,
-0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1,
-1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1,
-0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0,
-)
-
-Win1255HebrewModel = {
-  'char_to_order_map': WIN1255_CHAR_TO_ORDER_MAP,
-  'precedence_matrix': HEBREW_LANG_MODEL,
-  'typical_positive_ratio': 0.984004,
-  'keep_english_letter': False,
-  'charset_name': "windows-1255",
-  'language': 'Hebrew',
-}
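
For orientation, the 256-entry char_to_order_map tuples above are indexed by raw byte value, and the special codes called out in the comments (255 control characters, 254 carriage return/line feed, 253 punctuation, 252 digits) mark bytes that are not letters; actual letters get small order numbers, most frequent first. A minimal sketch of that lookup, using a hypothetical helper (`letters_only` is not part of chardet) and a toy map:

CONTROL, CRLF, SYMBOL, DIGIT = 255, 254, 253, 252  # special codes from the comments above

def letters_only(char_to_order_map, data):
    """Keep only the frequency-order values of letter bytes, dropping
    control characters, CR/LF, punctuation and digits (codes 252-255)."""
    return [order for order in (char_to_order_map[b] for b in data) if order < DIGIT]

# Toy 256-entry map: byte 0x41 ('A') is a letter with order 5, everything else punctuation.
toy_map = tuple(5 if b == 0x41 else SYMBOL for b in range(256))
print(letters_only(toy_map, b"A1A!"))  # -> [5, 5]
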
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langhungarianmodel.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langhungarianmodel.py
deleted file mode 100644
index bb7c095..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langhungarianmodel.py
+++ /dev/null
@@ -1,225 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# 255: Control characters that usually does not exist in any text
-# 254: Carriage/Return
-# 253: symbol (punctuation) that does not belong to word
-# 252: 0 - 9
-
-# Character Mapping Table:
-Latin2_HungarianCharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
- 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
-253,  2, 18, 26, 17,  1, 27, 12, 20,  9, 22,  7,  6, 13,  4,  8,
- 23, 67, 10,  5,  3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
-159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,
-175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,
-191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205,
- 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
-221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231,
-232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241,
- 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85,
-245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253,
-)
-
-win1250HungarianCharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
- 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
-253,  2, 18, 26, 17,  1, 27, 12, 20,  9, 22,  7,  6, 13,  4,  8,
- 23, 67, 10,  5,  3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
-161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,
-177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190,
-191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205,
- 81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
-221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231,
-232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241,
- 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87,
-245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253,
-)
-
-# Model Table:
-# total sequences: 100%
-# first 512 sequences: 94.7368%
-# first 1024 sequences:5.2623%
-# rest  sequences:     0.8894%
-# negative sequences:  0.0009%
-HungarianLangModel = (
-0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,
-3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2,
-3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
-3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3,
-0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
-3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
-3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2,
-0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,
-3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
-3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
-3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2,
-0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
-3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
-2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1,
-0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
-3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0,
-1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0,
-1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0,
-1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1,
-3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1,
-2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1,
-2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1,
-2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1,
-2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0,
-2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
-3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1,
-2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1,
-2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1,
-2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,
-1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1,
-1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1,
-3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0,
-1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1,
-1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1,
-2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1,
-2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0,
-2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1,
-3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1,
-2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1,
-1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0,
-1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
-2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1,
-2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,
-1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0,
-1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1,
-2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0,
-1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0,
-1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0,
-2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1,
-2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1,
-2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
-1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1,
-1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1,
-1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0,
-0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
-2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1,
-2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1,
-1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1,
-2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,
-1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0,
-1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0,
-2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0,
-2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1,
-2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0,
-1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
-2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0,
-0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
-1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,
-0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
-1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
-0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
-0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
-2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,
-0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
-)
-
-Latin2HungarianModel = {
-  'char_to_order_map': Latin2_HungarianCharToOrderMap,
-  'precedence_matrix': HungarianLangModel,
-  'typical_positive_ratio': 0.947368,
-  'keep_english_letter': True,
-  'charset_name': "ISO-8859-2",
-  'language': 'Hungarian',
-}
-
-Win1250HungarianModel = {
-  'char_to_order_map': win1250HungarianCharToOrderMap,
-  'precedence_matrix': HungarianLangModel,
-  'typical_positive_ratio': 0.947368,
-  'keep_english_letter': True,
-  'charset_name': "windows-1250",
-  'language': 'Hungarian',
-}
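
Reading the HungarianLangModel above (and the other *LangModel tuples in these deleted files) as data: each appears to be a flattened matrix of 0-3 likelihood classes indexed by the orders of two consecutive characters, and 4096 entries is consistent with only the 64 most frequent characters being modelled. That sample size, and the function below, are assumptions for illustration only, not code from the deleted probers:

SAMPLE_SIZE = 64  # assumed: only the 64 most frequent characters are modelled

def sequence_classes(orders, precedence_matrix):
    """Yield the 0-3 likelihood class for each adjacent pair of character orders."""
    for prev, cur in zip(orders, orders[1:]):
        if prev < SAMPLE_SIZE and cur < SAMPLE_SIZE:
            yield precedence_matrix[prev * SAMPLE_SIZE + cur]

# Toy matrix: all "negative" (0) except one "very likely" (3) pair.
toy_matrix = [0] * (SAMPLE_SIZE * SAMPLE_SIZE)
toy_matrix[2 * SAMPLE_SIZE + 7] = 3
print(list(sequence_classes([2, 7, 9], toy_matrix)))  # -> [3, 0]
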
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langthaimodel.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langthaimodel.py
deleted file mode 100644
index 15f94c2..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langthaimodel.py
+++ /dev/null
@@ -1,199 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# 255: Control characters that usually does not exist in any text
-# 254: Carriage/Return
-# 253: symbol (punctuation) that does not belong to word
-# 252: 0 - 9
-
-# The following result for thai was collected from a limited sample (1M).
-
-# Character Mapping Table:
-TIS620CharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
-253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
-252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
-253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111,  # 40
-188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253,  # 50
-253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82,  # 60
- 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253,  # 70
-209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222,
-223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235,
-236,  5, 30,237, 24,238, 75,  8, 26, 52, 34, 51,119, 47, 58, 57,
- 49, 53, 55, 43, 20, 19, 44, 14, 48,  3, 17, 25, 39, 62, 31, 54,
- 45,  9, 16,  2, 61, 15,239, 12, 42, 46, 18, 21, 76,  4, 66, 63,
- 22, 10,  1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244,
- 11, 28, 41, 29, 33,245, 50, 37,  6,  7, 67, 77, 38, 93,246,247,
- 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253,
-)
-
-# Model Table:
-# total sequences: 100%
-# first 512 sequences: 92.6386%
-# first 1024 sequences:7.3177%
-# rest  sequences:     1.0230%
-# negative sequences:  0.0436%
-ThaiLangModel = (
-0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3,
-0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2,
-3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3,
-0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1,
-3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2,
-3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1,
-3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2,
-3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1,
-3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1,
-3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,
-3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1,
-2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1,
-3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1,
-0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,
-3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1,
-0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,
-3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2,
-1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0,
-3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3,
-3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0,
-1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2,
-0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
-2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3,
-0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0,
-3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1,
-2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,
-3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2,
-0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2,
-3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
-3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0,
-2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,
-3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1,
-2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1,
-3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,
-3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0,
-3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1,
-3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1,
-3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1,
-1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2,
-0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3,
-0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,
-3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0,
-3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1,
-1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0,
-3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1,
-3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
-0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2,
-0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
-0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0,
-0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0,
-1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1,
-1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,
-3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1,
-0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
-0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0,
-0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
-3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0,
-3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0,
-0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1,
-0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0,
-0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1,
-0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,
-0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0,
-0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1,
-0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,
-3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0,
-0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0,
-0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,
-3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1,
-2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,
-0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0,
-3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0,
-0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
-2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0,
-1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3,
-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0,
-1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
-1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,
-1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-)
-
-TIS620ThaiModel = {
-  'char_to_order_map': TIS620CharToOrderMap,
-  'precedence_matrix': ThaiLangModel,
-  'typical_positive_ratio': 0.926386,
-  'keep_english_letter': False,
-  'charset_name': "TIS-620",
-  'language': 'Thai',
-}
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langturkishmodel.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langturkishmodel.py
deleted file mode 100644
index a427a45..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/langturkishmodel.py
+++ /dev/null
@@ -1,193 +0,0 @@
-# -*- coding: utf-8 -*-
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Communicator client code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#   Özgür Baskın - Turkish Language Model
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-# 255: Control characters that usually does not exist in any text
-# 254: Carriage/Return
-# 253: symbol (punctuation) that does not belong to word
-# 252: 0 - 9
-
-# Character Mapping Table:
-Latin5_TurkishCharToOrderMap = (
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,
-255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,
-255, 23, 37, 47, 39, 29, 52, 36, 45, 53, 60, 16, 49, 20, 46, 42,
- 48, 69, 44, 35, 31, 51, 38, 62, 65, 43, 56,255,255,255,255,255,
-255,  1, 21, 28, 12,  2, 18, 27, 25,  3, 24, 10,  5, 13,  4, 15,
- 26, 64,  7,  8,  9, 14, 32, 57, 58, 11, 22,255,255,255,255,255,
-180,179,178,177,176,175,174,173,172,171,170,169,168,167,166,165,
-164,163,162,161,160,159,101,158,157,156,155,154,153,152,151,106,
-150,149,148,147,146,145,144,100,143,142,141,140,139,138,137,136,
- 94, 80, 93,135,105,134,133, 63,132,131,130,129,128,127,126,125,
-124,104, 73, 99, 79, 85,123, 54,122, 98, 92,121,120, 91,103,119,
- 68,118,117, 97,116,115, 50, 90,114,113,112,111, 55, 41, 40, 86,
- 89, 70, 59, 78, 71, 82, 88, 33, 77, 66, 84, 83,110, 75, 61, 96,
- 30, 67,109, 74, 87,102, 34, 95, 81,108, 76, 72, 17,  6, 19,107,
-)
-
-TurkishLangModel = (
-3,2,3,3,3,1,3,3,3,3,3,3,3,3,2,1,1,3,3,1,3,3,0,3,3,3,3,3,0,3,1,3,
-3,2,1,0,0,1,1,0,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1,
-3,2,2,3,3,0,3,3,3,3,3,3,3,2,3,1,0,3,3,1,3,3,0,3,3,3,3,3,0,3,0,3,
-3,1,1,0,1,0,1,0,0,0,0,0,0,1,1,1,1,0,0,0,0,0,0,0,2,2,0,0,0,1,0,1,
-3,3,2,3,3,0,3,3,3,3,3,3,3,2,3,1,1,3,3,0,3,3,1,2,3,3,3,3,0,3,0,3,
-3,1,1,0,0,0,1,0,0,0,0,1,1,0,1,2,1,0,0,0,1,0,0,0,0,2,0,0,0,0,0,1,
-3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,1,3,3,2,0,3,2,1,2,2,1,3,3,0,0,0,2,
-2,2,0,1,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,1,0,0,1,
-3,3,3,2,3,3,1,2,3,3,3,3,3,3,3,1,3,2,1,0,3,2,0,1,2,3,3,2,1,0,0,2,
-2,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0,
-1,0,1,3,3,1,3,3,3,3,3,3,3,1,2,0,0,2,3,0,2,3,0,0,2,2,2,3,0,3,0,1,
-2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,0,3,2,0,2,3,2,3,3,1,0,0,2,
-3,2,0,0,1,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1,0,2,0,0,1,
-3,3,3,2,3,3,2,3,3,3,3,2,3,3,3,0,3,3,0,0,2,1,0,0,2,3,2,2,0,0,0,2,
-2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,0,1,0,2,0,0,1,
-3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,0,1,3,2,1,1,3,2,3,2,1,0,0,2,
-2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,
-3,3,3,2,3,3,3,3,3,3,3,2,3,3,3,0,3,2,2,0,2,3,0,0,2,2,2,2,0,0,0,2,
-3,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0,
-3,3,3,3,3,3,3,2,2,2,2,3,2,3,3,0,3,3,1,1,2,2,0,0,2,2,3,2,0,0,1,3,
-0,3,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,
-3,3,3,2,3,3,3,2,1,2,2,3,2,3,3,0,3,2,0,0,1,1,0,1,1,2,1,2,0,0,0,1,
-0,3,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,1,0,0,0,
-3,3,3,2,3,3,2,3,2,2,2,3,3,3,3,1,3,1,1,0,3,2,1,1,3,3,2,3,1,0,0,1,
-1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,2,0,0,1,
-3,2,2,3,3,0,3,3,3,3,3,3,3,2,2,1,0,3,3,1,3,3,0,1,3,3,2,3,0,3,0,3,
-2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
-2,2,2,3,3,0,3,3,3,3,3,3,3,3,3,0,0,3,2,0,3,3,0,3,2,3,3,3,0,3,1,3,
-2,0,0,0,0,0,0,0,0,0,0,1,0,1,2,0,1,0,0,0,0,0,0,0,2,2,0,0,1,0,0,1,
-3,3,3,1,2,3,3,1,0,0,1,0,0,3,3,2,3,0,0,2,0,0,2,0,2,0,0,0,2,0,2,0,
-0,3,1,0,1,0,0,0,2,2,1,0,1,1,2,1,2,2,2,0,2,1,1,0,0,0,2,0,0,0,0,0,
-1,2,1,3,3,0,3,3,3,3,3,2,3,0,0,0,0,2,3,0,2,3,1,0,2,3,1,3,0,3,0,2,
-3,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,1,3,3,2,2,3,2,2,0,1,2,3,0,1,2,1,0,1,0,0,0,1,0,2,2,0,0,0,1,
-1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,
-3,3,3,1,3,3,1,1,3,3,1,1,3,3,1,0,2,1,2,0,2,1,0,0,1,1,2,1,0,0,0,2,
-2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,1,0,2,1,3,0,0,2,0,0,3,3,0,3,0,0,1,0,1,2,0,0,1,1,2,2,0,1,0,
-0,1,2,1,1,0,1,0,1,1,1,1,1,0,1,1,1,2,2,1,2,0,1,0,0,0,0,0,0,1,0,0,
-3,3,3,2,3,2,3,3,0,2,2,2,3,3,3,0,3,0,0,0,2,2,0,1,2,1,1,1,0,0,0,1,
-0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
-3,3,3,3,3,3,2,1,2,2,3,3,3,3,2,0,2,0,0,0,2,2,0,0,2,1,3,3,0,0,1,1,
-1,1,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,
-1,1,2,3,3,0,3,3,3,3,3,3,2,2,0,2,0,2,3,2,3,2,2,2,2,2,2,2,1,3,2,3,
-2,0,2,1,2,2,2,2,1,1,2,2,1,2,2,1,2,0,0,2,1,1,0,2,1,0,0,1,0,0,0,1,
-2,3,3,1,1,1,0,1,1,1,2,3,2,1,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,
-0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,2,2,2,3,2,3,2,2,1,3,3,3,0,2,1,2,0,2,1,0,0,1,1,1,1,1,0,0,1,
-2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,2,0,1,0,0,0,
-3,3,3,2,3,3,3,3,3,2,3,1,2,3,3,1,2,0,0,0,0,0,0,0,3,2,1,1,0,0,0,0,
-2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
-3,3,3,2,2,3,3,2,1,1,1,1,1,3,3,0,3,1,0,0,1,1,0,0,3,1,2,1,0,0,0,0,
-0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,
-3,3,3,2,2,3,2,2,2,3,2,1,1,3,3,0,3,0,0,0,0,1,0,0,3,1,1,2,0,0,0,1,
-1,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
-1,1,1,3,3,0,3,3,3,3,3,2,2,2,1,2,0,2,1,2,2,1,1,0,1,2,2,2,2,2,2,2,
-0,0,2,1,2,1,2,1,0,1,1,3,1,2,1,1,2,0,0,2,0,1,0,1,0,1,0,0,0,1,0,1,
-3,3,3,1,3,3,3,0,1,1,0,2,2,3,1,0,3,0,0,0,1,0,0,0,1,0,0,1,0,1,0,0,
-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,2,0,0,2,2,1,0,0,1,0,0,3,3,1,3,0,0,1,1,0,2,0,3,0,0,0,2,0,1,1,
-0,1,2,0,1,2,2,0,2,2,2,2,1,0,2,1,1,0,2,0,2,1,2,0,0,0,0,0,0,0,0,0,
-3,3,3,1,3,2,3,2,0,2,2,2,1,3,2,0,2,1,2,0,1,2,0,0,1,0,2,2,0,0,0,2,
-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,
-3,3,3,0,3,3,1,1,2,3,1,0,3,2,3,0,3,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,
-1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,3,3,0,3,3,2,3,3,2,2,0,0,0,0,1,2,0,1,3,0,0,0,3,1,1,0,3,0,2,
-2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,1,2,2,1,0,3,1,1,1,1,3,3,2,3,0,0,1,0,1,2,0,2,2,0,2,2,0,2,1,
-0,2,2,1,1,1,1,0,2,1,1,0,1,1,1,1,2,1,2,1,2,0,1,0,1,0,0,0,0,0,0,0,
-3,3,3,0,1,1,3,0,0,1,1,0,0,2,2,0,3,0,0,1,1,0,1,0,0,0,0,0,2,0,0,0,
-0,3,1,0,1,0,1,0,2,0,0,1,0,1,0,1,1,1,2,1,1,0,2,0,0,0,0,0,0,0,0,0,
-3,3,3,0,2,0,2,0,1,1,1,0,0,3,3,0,2,0,0,1,0,0,2,1,1,0,1,0,1,0,1,0,
-0,2,0,1,2,0,2,0,2,1,1,0,1,0,2,1,1,0,2,1,1,0,1,0,0,0,1,1,0,0,0,0,
-3,2,3,0,1,0,0,0,0,0,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,0,2,0,0,0,
-0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,2,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,0,0,2,3,0,0,1,0,1,0,2,3,2,3,0,0,1,3,0,2,1,0,0,0,0,2,0,1,0,
-0,2,1,0,0,1,1,0,2,1,0,0,1,0,0,1,1,0,1,1,2,0,1,0,0,0,0,1,0,0,0,0,
-3,2,2,0,0,1,1,0,0,0,0,0,0,3,1,1,1,0,0,0,0,0,1,0,0,0,0,0,2,0,1,0,
-0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,
-0,0,0,3,3,0,2,3,2,2,1,2,2,1,1,2,0,1,3,2,2,2,0,0,2,2,0,0,0,1,2,1,
-3,0,2,1,1,0,1,1,1,0,1,2,2,2,1,1,2,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,
-0,1,1,2,3,0,3,3,3,2,2,2,2,1,0,1,0,1,0,1,2,2,0,0,2,2,1,3,1,1,2,1,
-0,0,1,1,2,0,1,1,0,0,1,2,0,2,1,1,2,0,0,1,0,0,0,1,0,1,0,1,0,0,0,0,
-3,3,2,0,0,3,1,0,0,0,0,0,0,3,2,1,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0,
-0,2,1,1,0,0,1,0,1,2,0,0,1,1,0,0,2,1,1,1,1,0,2,0,0,0,0,0,0,0,0,0,
-3,3,2,0,0,1,0,0,0,0,1,0,0,3,3,2,2,0,0,1,0,0,2,0,1,0,0,0,2,0,1,0,
-0,0,1,1,0,0,2,0,2,1,0,0,1,1,2,1,2,0,2,1,2,1,1,1,0,0,1,1,0,0,0,0,
-3,3,2,0,0,2,2,0,0,0,1,1,0,2,2,1,3,1,0,1,0,1,2,0,0,0,0,0,1,0,1,0,
-0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,2,0,0,0,1,0,0,1,0,0,2,3,1,2,0,0,1,0,0,2,0,0,0,1,0,2,0,2,0,
-0,1,1,2,2,1,2,0,2,1,1,0,0,1,1,0,1,1,1,1,2,1,1,0,0,0,0,0,0,0,0,0,
-3,3,3,0,2,1,2,1,0,0,1,1,0,3,3,1,2,0,0,1,0,0,2,0,2,0,1,1,2,0,0,0,
-0,0,1,1,1,1,2,0,1,1,0,1,1,1,1,0,0,0,1,1,1,0,1,0,0,0,1,0,0,0,0,0,
-3,3,3,0,2,2,3,2,0,0,1,0,0,2,3,1,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0,
-0,1,1,0,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,
-3,2,3,0,0,0,0,0,0,0,1,0,0,2,2,2,2,0,0,1,0,0,2,0,0,0,0,0,2,0,1,0,
-0,0,2,1,1,0,1,0,2,1,1,0,0,1,1,2,1,0,2,0,2,0,1,0,0,0,2,0,0,0,0,0,
-0,0,0,2,2,0,2,1,1,1,1,2,2,0,0,1,0,1,0,0,1,3,0,0,0,0,1,0,0,2,1,0,
-0,0,1,0,1,0,0,0,0,0,2,1,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
-2,0,0,2,3,0,2,3,1,2,2,0,2,0,0,2,0,2,1,1,1,2,1,0,0,1,2,1,1,2,1,0,
-1,0,2,0,1,0,1,1,0,0,2,2,1,2,1,1,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
-3,3,3,0,2,1,2,0,0,0,1,0,0,3,2,0,1,0,0,1,0,0,2,0,0,0,1,2,1,0,1,0,
-0,0,0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,0,1,1,1,0,1,0,0,0,0,0,0,0,0,0,
-0,0,0,2,2,0,2,2,1,1,0,1,1,1,1,1,0,0,1,2,1,1,1,0,1,0,0,0,1,1,1,1,
-0,0,2,1,0,1,1,1,0,1,1,2,1,2,1,1,2,0,1,1,2,1,0,2,0,0,0,0,0,0,0,0,
-3,2,2,0,0,2,0,0,0,0,0,0,0,2,2,0,2,0,0,1,0,0,2,0,0,0,0,0,2,0,0,0,
-0,2,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,
-0,0,0,3,2,0,2,2,0,1,1,0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,0,0,0,
-2,0,1,0,1,0,1,1,0,0,1,2,0,1,0,1,1,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,
-2,2,2,0,1,1,0,0,0,1,0,0,0,1,2,0,1,0,0,1,0,0,1,0,0,0,0,1,2,0,1,0,
-0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,2,1,0,1,1,1,0,0,0,0,1,2,0,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
-1,1,2,0,1,0,0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,
-0,0,1,2,2,0,2,1,2,1,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,0,0,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
-2,2,2,0,0,0,1,0,0,0,0,0,0,2,2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
-0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-2,2,2,0,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,1,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-)
-
-Latin5TurkishModel = {
-  'char_to_order_map': Latin5_TurkishCharToOrderMap,
-  'precedence_matrix': TurkishLangModel,
-  'typical_positive_ratio': 0.970290,
-  'keep_english_letter': True,
-  'charset_name': "ISO-8859-9",
-  'language': 'Turkish',
-}
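
The model dictionaries deleted above all share the same schema (char_to_order_map, precedence_matrix, typical_positive_ratio, keep_english_letter, charset_name, language), so a caller can score a buffer against each one and keep the best. A hypothetical selector along those lines, not the deleted group prober:

def best_model(scored_models):
    """scored_models: iterable of (model_dict, confidence) pairs."""
    best = max(scored_models, key=lambda pair: pair[1], default=None)
    if best is None:
        return None
    model, confidence = best
    return model['charset_name'], model['language'], confidence

toy_scores = [
    ({'charset_name': 'ISO-8859-9', 'language': 'Turkish'}, 0.41),
    ({'charset_name': 'TIS-620', 'language': 'Thai'}, 0.07),
]
print(best_model(toy_scores))  # -> ('ISO-8859-9', 'Turkish', 0.41)
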
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/latin1prober.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/latin1prober.py
deleted file mode 100644
index 7d1e8c2..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/latin1prober.py
+++ /dev/null
@@ -1,145 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 2001
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#   Shy Shalom - original C code
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .charsetprober import CharSetProber
-from .enums import ProbingState
-
-FREQ_CAT_NUM = 4
-
-UDF = 0  # undefined
-OTH = 1  # other
-ASC = 2  # ascii capital letter
-ASS = 3  # ascii small letter
-ACV = 4  # accent capital vowel
-ACO = 5  # accent capital other
-ASV = 6  # accent small vowel
-ASO = 7  # accent small other
-CLASS_NUM = 8  # total classes
-
-Latin1_CharToClass = (
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 00 - 07
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 08 - 0F
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 10 - 17
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 18 - 1F
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 20 - 27
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 28 - 2F
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 30 - 37
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 38 - 3F
-    OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC,   # 40 - 47
-    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,   # 48 - 4F
-    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,   # 50 - 57
-    ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH,   # 58 - 5F
-    OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS,   # 60 - 67
-    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,   # 68 - 6F
-    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,   # 70 - 77
-    ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH,   # 78 - 7F
-    OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH,   # 80 - 87
-    OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF,   # 88 - 8F
-    UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 90 - 97
-    OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO,   # 98 - 9F
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # A0 - A7
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # A8 - AF
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # B0 - B7
-    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # B8 - BF
-    ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO,   # C0 - C7
-    ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV,   # C8 - CF
-    ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH,   # D0 - D7
-    ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO,   # D8 - DF
-    ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO,   # E0 - E7
-    ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV,   # E8 - EF
-    ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH,   # F0 - F7
-    ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO,   # F8 - FF
-)
-
-# 0 : illegal
-# 1 : very unlikely
-# 2 : normal
-# 3 : very likely
-Latin1ClassModel = (
-# UDF OTH ASC ASS ACV ACO ASV ASO
-    0,  0,  0,  0,  0,  0,  0,  0,  # UDF
-    0,  3,  3,  3,  3,  3,  3,  3,  # OTH
-    0,  3,  3,  3,  3,  3,  3,  3,  # ASC
-    0,  3,  3,  3,  1,  1,  3,  3,  # ASS
-    0,  3,  3,  3,  1,  2,  1,  2,  # ACV
-    0,  3,  3,  3,  3,  3,  3,  3,  # ACO
-    0,  3,  1,  3,  1,  1,  1,  3,  # ASV
-    0,  3,  1,  3,  1,  1,  3,  3,  # ASO
-)
-
-
-class Latin1Prober(CharSetProber):
-    def __init__(self):
-        super(Latin1Prober, self).__init__()
-        self._last_char_class = None
-        self._freq_counter = None
-        self.reset()
-
-    def reset(self):
-        self._last_char_class = OTH
-        self._freq_counter = [0] * FREQ_CAT_NUM
-        CharSetProber.reset(self)
-
-    @property
-    def charset_name(self):
-        return "ISO-8859-1"
-
-    @property
-    def language(self):
-        return ""
-
-    def feed(self, byte_str):
-        byte_str = self.filter_with_english_letters(byte_str)
-        for c in byte_str:
-            char_class = Latin1_CharToClass[c]
-            freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM)
-                                    + char_class]
-            if freq == 0:
-                self._state = ProbingState.NOT_ME
-                break
-            self._freq_counter[freq] += 1
-            self._last_char_class = char_class
-
-        return self.state
-
-    def get_confidence(self):
-        if self.state == ProbingState.NOT_ME:
-            return 0.01
-
-        total = sum(self._freq_counter)
-        if total < 0.01:
-            confidence = 0.0
-        else:
-            confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0)
-                          / total)
-        if confidence < 0.0:
-            confidence = 0.0
-        # Lower the confidence of Latin-1 so that other, more accurate
-        # detectors can take priority.
-        confidence = confidence * 0.73
-        return confidence
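
The confidence computed above is a weighted vote over the four frequency buckets filled in by feed(). As a reference, here is a standalone restatement of that arithmetic; it is not part of the vendored file, and the bucket counts in the example are made up.

# Illustrative re-implementation of Latin1Prober.get_confidence().
# Bucket layout: index 1 = "very unlikely" pairs, index 3 = "very likely" pairs.
def latin1_confidence(freq_counter):
    total = sum(freq_counter)
    if total < 0.01:          # i.e. no class pairs were counted at all
        confidence = 0.0
    else:
        # "Very likely" pairs add to the score; "very unlikely" pairs
        # are penalised twenty-fold.
        confidence = (freq_counter[3] - freq_counter[1] * 20.0) / total
    confidence = max(confidence, 0.0)
    # Scale down so that more specific probers win ties against Latin-1.
    return confidence * 0.73

print(latin1_confidence([0, 2, 80, 118]))   # ~0.28 when "very likely" pairs dominate
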
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/mbcharsetprober.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/mbcharsetprober.py
deleted file mode 100644
index 6256ecf..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/mbcharsetprober.py
+++ /dev/null
@@ -1,91 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 2001
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#   Shy Shalom - original C code
-#   Proofpoint, Inc.
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .charsetprober import CharSetProber
-from .enums import ProbingState, MachineState
-
-
-class MultiByteCharSetProber(CharSetProber):
-    """
-    MultiByteCharSetProber
-    """
-
-    def __init__(self, lang_filter=None):
-        super(MultiByteCharSetProber, self).__init__(lang_filter=lang_filter)
-        self.distribution_analyzer = None
-        self.coding_sm = None
-        self._last_char = [0, 0]
-
-    def reset(self):
-        super(MultiByteCharSetProber, self).reset()
-        if self.coding_sm:
-            self.coding_sm.reset()
-        if self.distribution_analyzer:
-            self.distribution_analyzer.reset()
-        self._last_char = [0, 0]
-
-    @property
-    def charset_name(self):
-        raise NotImplementedError
-
-    @property
-    def language(self):
-        raise NotImplementedError
-
-    def feed(self, byte_str):
-        for i in range(len(byte_str)):
-            coding_state = self.coding_sm.next_state(byte_str[i])
-            if coding_state == MachineState.ERROR:
-                self.logger.debug('%s %s prober hit error at byte %s',
-                                  self.charset_name, self.language, i)
-                self._state = ProbingState.NOT_ME
-                break
-            elif coding_state == MachineState.ITS_ME:
-                self._state = ProbingState.FOUND_IT
-                break
-            elif coding_state == MachineState.START:
-                char_len = self.coding_sm.get_current_charlen()
-                if i == 0:
-                    self._last_char[1] = byte_str[0]
-                    self.distribution_analyzer.feed(self._last_char, char_len)
-                else:
-                    self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
-                                                    char_len)
-
-        self._last_char[0] = byte_str[-1]
-
-        if self.state == ProbingState.DETECTING:
-            if (self.distribution_analyzer.got_enough_data() and
-                    (self.get_confidence() > self.SHORTCUT_THRESHOLD)):
-                self._state = ProbingState.FOUND_IT
-
-        return self.state
-
-    def get_confidence(self):
-        return self.distribution_analyzer.get_confidence()
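
MultiByteCharSetProber leaves charset_name, coding_sm and distribution_analyzer to its subclasses: the state machine validates the byte sequence while the distribution analyzer scores character frequencies, and feed() above drives both. As a sketch of how a concrete subclass fills in those pieces, this is roughly what chardet's Big5Prober looks like (absolute imports shown for readability; the vendored copy under pip._vendor uses relative imports).

from chardet.mbcharsetprober import MultiByteCharSetProber
from chardet.codingstatemachine import CodingStateMachine
from chardet.chardistribution import Big5DistributionAnalysis
from chardet.mbcssm import BIG5_SM_MODEL


class Big5Prober(MultiByteCharSetProber):
    def __init__(self):
        super(Big5Prober, self).__init__()
        # Structural check: is this even a legal Big5 byte sequence?
        self.coding_sm = CodingStateMachine(BIG5_SM_MODEL)
        # Statistical check: do the decoded characters look like real text?
        self.distribution_analyzer = Big5DistributionAnalysis()
        self.reset()

    @property
    def charset_name(self):
        return "Big5"

    @property
    def language(self):
        return "Chinese"
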
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/mbcsgroupprober.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/mbcsgroupprober.py
deleted file mode 100644
index 530abe7..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/mbcsgroupprober.py
+++ /dev/null
@@ -1,54 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 2001
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#   Shy Shalom - original C code
-#   Proofpoint, Inc.
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .charsetgroupprober import CharSetGroupProber
-from .utf8prober import UTF8Prober
-from .sjisprober import SJISProber
-from .eucjpprober import EUCJPProber
-from .gb2312prober import GB2312Prober
-from .euckrprober import EUCKRProber
-from .cp949prober import CP949Prober
-from .big5prober import Big5Prober
-from .euctwprober import EUCTWProber
-
-
-class MBCSGroupProber(CharSetGroupProber):
-    def __init__(self, lang_filter=None):
-        super(MBCSGroupProber, self).__init__(lang_filter=lang_filter)
-        self.probers = [
-            UTF8Prober(),
-            SJISProber(),
-            EUCJPProber(),
-            GB2312Prober(),
-            EUCKRProber(),
-            CP949Prober(),
-            Big5Prober(),
-            EUCTWProber()
-        ]
-        self.reset()
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/mbcssm.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/mbcssm.py
deleted file mode 100644
index 8360d0f..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/mbcssm.py
+++ /dev/null
@@ -1,572 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .enums import MachineState
-
-# BIG5
-
-BIG5_CLS = (
-    1,1,1,1,1,1,1,1,  # 00 - 07    #allow 0x00 as legal value
-    1,1,1,1,1,1,0,0,  # 08 - 0f
-    1,1,1,1,1,1,1,1,  # 10 - 17
-    1,1,1,0,1,1,1,1,  # 18 - 1f
-    1,1,1,1,1,1,1,1,  # 20 - 27
-    1,1,1,1,1,1,1,1,  # 28 - 2f
-    1,1,1,1,1,1,1,1,  # 30 - 37
-    1,1,1,1,1,1,1,1,  # 38 - 3f
-    2,2,2,2,2,2,2,2,  # 40 - 47
-    2,2,2,2,2,2,2,2,  # 48 - 4f
-    2,2,2,2,2,2,2,2,  # 50 - 57
-    2,2,2,2,2,2,2,2,  # 58 - 5f
-    2,2,2,2,2,2,2,2,  # 60 - 67
-    2,2,2,2,2,2,2,2,  # 68 - 6f
-    2,2,2,2,2,2,2,2,  # 70 - 77
-    2,2,2,2,2,2,2,1,  # 78 - 7f
-    4,4,4,4,4,4,4,4,  # 80 - 87
-    4,4,4,4,4,4,4,4,  # 88 - 8f
-    4,4,4,4,4,4,4,4,  # 90 - 97
-    4,4,4,4,4,4,4,4,  # 98 - 9f
-    4,3,3,3,3,3,3,3,  # a0 - a7
-    3,3,3,3,3,3,3,3,  # a8 - af
-    3,3,3,3,3,3,3,3,  # b0 - b7
-    3,3,3,3,3,3,3,3,  # b8 - bf
-    3,3,3,3,3,3,3,3,  # c0 - c7
-    3,3,3,3,3,3,3,3,  # c8 - cf
-    3,3,3,3,3,3,3,3,  # d0 - d7
-    3,3,3,3,3,3,3,3,  # d8 - df
-    3,3,3,3,3,3,3,3,  # e0 - e7
-    3,3,3,3,3,3,3,3,  # e8 - ef
-    3,3,3,3,3,3,3,3,  # f0 - f7
-    3,3,3,3,3,3,3,0  # f8 - ff
-)
-
-BIG5_ST = (
-    MachineState.ERROR,MachineState.START,MachineState.START,     3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
-    MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,#08-0f
-    MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START#10-17
-)
-
-BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0)
-
-BIG5_SM_MODEL = {'class_table': BIG5_CLS,
-                 'class_factor': 5,
-                 'state_table': BIG5_ST,
-                 'char_len_table': BIG5_CHAR_LEN_TABLE,
-                 'name': 'Big5'}
-
-# CP949
-
-CP949_CLS  = (
-    1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0,  # 00 - 0f
-    1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1,  # 10 - 1f
-    1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,  # 20 - 2f
-    1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,  # 30 - 3f
-    1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4,  # 40 - 4f
-    4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1,  # 50 - 5f
-    1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5,  # 60 - 6f
-    5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1,  # 70 - 7f
-    0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6,  # 80 - 8f
-    6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6,  # 90 - 9f
-    6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8,  # a0 - af
-    7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7,  # b0 - bf
-    7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2,  # c0 - cf
-    2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2,  # d0 - df
-    2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2,  # e0 - ef
-    2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0,  # f0 - ff
-)
-
-CP949_ST = (
-#cls=    0      1      2      3      4      5      6      7      8      9  # previous state =
-    MachineState.ERROR,MachineState.START,     3,MachineState.ERROR,MachineState.START,MachineState.START,     4,     5,MachineState.ERROR,     6, # MachineState.START
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, # MachineState.ERROR
-    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME, # MachineState.ITS_ME
-    MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 3
-    MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 4
-    MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START, # 5
-    MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START, # 6
-)
-
-CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2)
-
-CP949_SM_MODEL = {'class_table': CP949_CLS,
-                  'class_factor': 10,
-                  'state_table': CP949_ST,
-                  'char_len_table': CP949_CHAR_LEN_TABLE,
-                  'name': 'CP949'}
-
-# EUC-JP
-
-EUCJP_CLS = (
-    4,4,4,4,4,4,4,4,  # 00 - 07
-    4,4,4,4,4,4,5,5,  # 08 - 0f
-    4,4,4,4,4,4,4,4,  # 10 - 17
-    4,4,4,5,4,4,4,4,  # 18 - 1f
-    4,4,4,4,4,4,4,4,  # 20 - 27
-    4,4,4,4,4,4,4,4,  # 28 - 2f
-    4,4,4,4,4,4,4,4,  # 30 - 37
-    4,4,4,4,4,4,4,4,  # 38 - 3f
-    4,4,4,4,4,4,4,4,  # 40 - 47
-    4,4,4,4,4,4,4,4,  # 48 - 4f
-    4,4,4,4,4,4,4,4,  # 50 - 57
-    4,4,4,4,4,4,4,4,  # 58 - 5f
-    4,4,4,4,4,4,4,4,  # 60 - 67
-    4,4,4,4,4,4,4,4,  # 68 - 6f
-    4,4,4,4,4,4,4,4,  # 70 - 77
-    4,4,4,4,4,4,4,4,  # 78 - 7f
-    5,5,5,5,5,5,5,5,  # 80 - 87
-    5,5,5,5,5,5,1,3,  # 88 - 8f
-    5,5,5,5,5,5,5,5,  # 90 - 97
-    5,5,5,5,5,5,5,5,  # 98 - 9f
-    5,2,2,2,2,2,2,2,  # a0 - a7
-    2,2,2,2,2,2,2,2,  # a8 - af
-    2,2,2,2,2,2,2,2,  # b0 - b7
-    2,2,2,2,2,2,2,2,  # b8 - bf
-    2,2,2,2,2,2,2,2,  # c0 - c7
-    2,2,2,2,2,2,2,2,  # c8 - cf
-    2,2,2,2,2,2,2,2,  # d0 - d7
-    2,2,2,2,2,2,2,2,  # d8 - df
-    0,0,0,0,0,0,0,0,  # e0 - e7
-    0,0,0,0,0,0,0,0,  # e8 - ef
-    0,0,0,0,0,0,0,0,  # f0 - f7
-    0,0,0,0,0,0,0,5  # f8 - ff
-)
-
-EUCJP_ST = (
-          3,     4,     3,     5,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
-     MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
-     MachineState.ITS_ME,MachineState.ITS_ME,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17
-     MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     3,MachineState.ERROR,#18-1f
-          3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START#20-27
-)
-
-EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0)
-
-EUCJP_SM_MODEL = {'class_table': EUCJP_CLS,
-                  'class_factor': 6,
-                  'state_table': EUCJP_ST,
-                  'char_len_table': EUCJP_CHAR_LEN_TABLE,
-                  'name': 'EUC-JP'}
-
-# EUC-KR
-
-EUCKR_CLS  = (
-    1,1,1,1,1,1,1,1,  # 00 - 07
-    1,1,1,1,1,1,0,0,  # 08 - 0f
-    1,1,1,1,1,1,1,1,  # 10 - 17
-    1,1,1,0,1,1,1,1,  # 18 - 1f
-    1,1,1,1,1,1,1,1,  # 20 - 27
-    1,1,1,1,1,1,1,1,  # 28 - 2f
-    1,1,1,1,1,1,1,1,  # 30 - 37
-    1,1,1,1,1,1,1,1,  # 38 - 3f
-    1,1,1,1,1,1,1,1,  # 40 - 47
-    1,1,1,1,1,1,1,1,  # 48 - 4f
-    1,1,1,1,1,1,1,1,  # 50 - 57
-    1,1,1,1,1,1,1,1,  # 58 - 5f
-    1,1,1,1,1,1,1,1,  # 60 - 67
-    1,1,1,1,1,1,1,1,  # 68 - 6f
-    1,1,1,1,1,1,1,1,  # 70 - 77
-    1,1,1,1,1,1,1,1,  # 78 - 7f
-    0,0,0,0,0,0,0,0,  # 80 - 87
-    0,0,0,0,0,0,0,0,  # 88 - 8f
-    0,0,0,0,0,0,0,0,  # 90 - 97
-    0,0,0,0,0,0,0,0,  # 98 - 9f
-    0,2,2,2,2,2,2,2,  # a0 - a7
-    2,2,2,2,2,3,3,3,  # a8 - af
-    2,2,2,2,2,2,2,2,  # b0 - b7
-    2,2,2,2,2,2,2,2,  # b8 - bf
-    2,2,2,2,2,2,2,2,  # c0 - c7
-    2,3,2,2,2,2,2,2,  # c8 - cf
-    2,2,2,2,2,2,2,2,  # d0 - d7
-    2,2,2,2,2,2,2,2,  # d8 - df
-    2,2,2,2,2,2,2,2,  # e0 - e7
-    2,2,2,2,2,2,2,2,  # e8 - ef
-    2,2,2,2,2,2,2,2,  # f0 - f7
-    2,2,2,2,2,2,2,0   # f8 - ff
-)
-
-EUCKR_ST = (
-    MachineState.ERROR,MachineState.START,     3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
-    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #08-0f
-)
-
-EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0)
-
-EUCKR_SM_MODEL = {'class_table': EUCKR_CLS,
-                'class_factor': 4,
-                'state_table': EUCKR_ST,
-                'char_len_table': EUCKR_CHAR_LEN_TABLE,
-                'name': 'EUC-KR'}
-
-# EUC-TW
-
-EUCTW_CLS = (
-    2,2,2,2,2,2,2,2,  # 00 - 07
-    2,2,2,2,2,2,0,0,  # 08 - 0f
-    2,2,2,2,2,2,2,2,  # 10 - 17
-    2,2,2,0,2,2,2,2,  # 18 - 1f
-    2,2,2,2,2,2,2,2,  # 20 - 27
-    2,2,2,2,2,2,2,2,  # 28 - 2f
-    2,2,2,2,2,2,2,2,  # 30 - 37
-    2,2,2,2,2,2,2,2,  # 38 - 3f
-    2,2,2,2,2,2,2,2,  # 40 - 47
-    2,2,2,2,2,2,2,2,  # 48 - 4f
-    2,2,2,2,2,2,2,2,  # 50 - 57
-    2,2,2,2,2,2,2,2,  # 58 - 5f
-    2,2,2,2,2,2,2,2,  # 60 - 67
-    2,2,2,2,2,2,2,2,  # 68 - 6f
-    2,2,2,2,2,2,2,2,  # 70 - 77
-    2,2,2,2,2,2,2,2,  # 78 - 7f
-    0,0,0,0,0,0,0,0,  # 80 - 87
-    0,0,0,0,0,0,6,0,  # 88 - 8f
-    0,0,0,0,0,0,0,0,  # 90 - 97
-    0,0,0,0,0,0,0,0,  # 98 - 9f
-    0,3,4,4,4,4,4,4,  # a0 - a7
-    5,5,1,1,1,1,1,1,  # a8 - af
-    1,1,1,1,1,1,1,1,  # b0 - b7
-    1,1,1,1,1,1,1,1,  # b8 - bf
-    1,1,3,1,3,3,3,3,  # c0 - c7
-    3,3,3,3,3,3,3,3,  # c8 - cf
-    3,3,3,3,3,3,3,3,  # d0 - d7
-    3,3,3,3,3,3,3,3,  # d8 - df
-    3,3,3,3,3,3,3,3,  # e0 - e7
-    3,3,3,3,3,3,3,3,  # e8 - ef
-    3,3,3,3,3,3,3,3,  # f0 - f7
-    3,3,3,3,3,3,3,0   # f8 - ff
-)
-
-EUCTW_ST = (
-    MachineState.ERROR,MachineState.ERROR,MachineState.START,     3,     3,     3,     4,MachineState.ERROR,#00-07
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
-    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.ERROR,#10-17
-    MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f
-         5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,#20-27
-    MachineState.START,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f
-)
-
-EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3)
-
-EUCTW_SM_MODEL = {'class_table': EUCTW_CLS,
-                'class_factor': 7,
-                'state_table': EUCTW_ST,
-                'char_len_table': EUCTW_CHAR_LEN_TABLE,
-                'name': 'x-euc-tw'}
-
-# GB2312
-
-GB2312_CLS = (
-    1,1,1,1,1,1,1,1,  # 00 - 07
-    1,1,1,1,1,1,0,0,  # 08 - 0f
-    1,1,1,1,1,1,1,1,  # 10 - 17
-    1,1,1,0,1,1,1,1,  # 18 - 1f
-    1,1,1,1,1,1,1,1,  # 20 - 27
-    1,1,1,1,1,1,1,1,  # 28 - 2f
-    3,3,3,3,3,3,3,3,  # 30 - 37
-    3,3,1,1,1,1,1,1,  # 38 - 3f
-    2,2,2,2,2,2,2,2,  # 40 - 47
-    2,2,2,2,2,2,2,2,  # 48 - 4f
-    2,2,2,2,2,2,2,2,  # 50 - 57
-    2,2,2,2,2,2,2,2,  # 58 - 5f
-    2,2,2,2,2,2,2,2,  # 60 - 67
-    2,2,2,2,2,2,2,2,  # 68 - 6f
-    2,2,2,2,2,2,2,2,  # 70 - 77
-    2,2,2,2,2,2,2,4,  # 78 - 7f
-    5,6,6,6,6,6,6,6,  # 80 - 87
-    6,6,6,6,6,6,6,6,  # 88 - 8f
-    6,6,6,6,6,6,6,6,  # 90 - 97
-    6,6,6,6,6,6,6,6,  # 98 - 9f
-    6,6,6,6,6,6,6,6,  # a0 - a7
-    6,6,6,6,6,6,6,6,  # a8 - af
-    6,6,6,6,6,6,6,6,  # b0 - b7
-    6,6,6,6,6,6,6,6,  # b8 - bf
-    6,6,6,6,6,6,6,6,  # c0 - c7
-    6,6,6,6,6,6,6,6,  # c8 - cf
-    6,6,6,6,6,6,6,6,  # d0 - d7
-    6,6,6,6,6,6,6,6,  # d8 - df
-    6,6,6,6,6,6,6,6,  # e0 - e7
-    6,6,6,6,6,6,6,6,  # e8 - ef
-    6,6,6,6,6,6,6,6,  # f0 - f7
-    6,6,6,6,6,6,6,0   # f8 - ff
-)
-
-GB2312_ST = (
-    MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,     3,MachineState.ERROR,#00-07
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
-    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,#10-17
-         4,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f
-    MachineState.ERROR,MachineState.ERROR,     5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#20-27
-    MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START #28-2f
-)
-
-# To be accurate, the length of class 6 can be either 2 or 4.
-# But it is not necessary to discriminate between the two, since
-# the table is used for frequency analysis only and each code
-# range is validated there as well. So it is safe to set it to
-# 2 here.
-GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2)
-
-GB2312_SM_MODEL = {'class_table': GB2312_CLS,
-                   'class_factor': 7,
-                   'state_table': GB2312_ST,
-                   'char_len_table': GB2312_CHAR_LEN_TABLE,
-                   'name': 'GB2312'}
-
-# Shift_JIS
-
-SJIS_CLS = (
-    1,1,1,1,1,1,1,1,  # 00 - 07
-    1,1,1,1,1,1,0,0,  # 08 - 0f
-    1,1,1,1,1,1,1,1,  # 10 - 17
-    1,1,1,0,1,1,1,1,  # 18 - 1f
-    1,1,1,1,1,1,1,1,  # 20 - 27
-    1,1,1,1,1,1,1,1,  # 28 - 2f
-    1,1,1,1,1,1,1,1,  # 30 - 37
-    1,1,1,1,1,1,1,1,  # 38 - 3f
-    2,2,2,2,2,2,2,2,  # 40 - 47
-    2,2,2,2,2,2,2,2,  # 48 - 4f
-    2,2,2,2,2,2,2,2,  # 50 - 57
-    2,2,2,2,2,2,2,2,  # 58 - 5f
-    2,2,2,2,2,2,2,2,  # 60 - 67
-    2,2,2,2,2,2,2,2,  # 68 - 6f
-    2,2,2,2,2,2,2,2,  # 70 - 77
-    2,2,2,2,2,2,2,1,  # 78 - 7f
-    3,3,3,3,3,2,2,3,  # 80 - 87
-    3,3,3,3,3,3,3,3,  # 88 - 8f
-    3,3,3,3,3,3,3,3,  # 90 - 97
-    3,3,3,3,3,3,3,3,  # 98 - 9f
-    # 0xa0 is illegal in Shift_JIS encoding, but some pages do
-    # contain such bytes, so we need to be more forgiving of errors.
-    2,2,2,2,2,2,2,2,  # a0 - a7
-    2,2,2,2,2,2,2,2,  # a8 - af
-    2,2,2,2,2,2,2,2,  # b0 - b7
-    2,2,2,2,2,2,2,2,  # b8 - bf
-    2,2,2,2,2,2,2,2,  # c0 - c7
-    2,2,2,2,2,2,2,2,  # c8 - cf
-    2,2,2,2,2,2,2,2,  # d0 - d7
-    2,2,2,2,2,2,2,2,  # d8 - df
-    3,3,3,3,3,3,3,3,  # e0 - e7
-    3,3,3,3,3,4,4,4,  # e8 - ef
-    3,3,3,3,3,3,3,3,  # f0 - f7
-    3,3,3,3,3,0,0,0)  # f8 - ff
-
-
-SJIS_ST = (
-    MachineState.ERROR,MachineState.START,MachineState.START,     3,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#00-07
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
-    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START #10-17
-)
-
-SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0)
-
-SJIS_SM_MODEL = {'class_table': SJIS_CLS,
-               'class_factor': 6,
-               'state_table': SJIS_ST,
-               'char_len_table': SJIS_CHAR_LEN_TABLE,
-               'name': 'Shift_JIS'}
-
-# UCS2-BE
-
-UCS2BE_CLS = (
-    0,0,0,0,0,0,0,0,  # 00 - 07
-    0,0,1,0,0,2,0,0,  # 08 - 0f
-    0,0,0,0,0,0,0,0,  # 10 - 17
-    0,0,0,3,0,0,0,0,  # 18 - 1f
-    0,0,0,0,0,0,0,0,  # 20 - 27
-    0,3,3,3,3,3,0,0,  # 28 - 2f
-    0,0,0,0,0,0,0,0,  # 30 - 37
-    0,0,0,0,0,0,0,0,  # 38 - 3f
-    0,0,0,0,0,0,0,0,  # 40 - 47
-    0,0,0,0,0,0,0,0,  # 48 - 4f
-    0,0,0,0,0,0,0,0,  # 50 - 57
-    0,0,0,0,0,0,0,0,  # 58 - 5f
-    0,0,0,0,0,0,0,0,  # 60 - 67
-    0,0,0,0,0,0,0,0,  # 68 - 6f
-    0,0,0,0,0,0,0,0,  # 70 - 77
-    0,0,0,0,0,0,0,0,  # 78 - 7f
-    0,0,0,0,0,0,0,0,  # 80 - 87
-    0,0,0,0,0,0,0,0,  # 88 - 8f
-    0,0,0,0,0,0,0,0,  # 90 - 97
-    0,0,0,0,0,0,0,0,  # 98 - 9f
-    0,0,0,0,0,0,0,0,  # a0 - a7
-    0,0,0,0,0,0,0,0,  # a8 - af
-    0,0,0,0,0,0,0,0,  # b0 - b7
-    0,0,0,0,0,0,0,0,  # b8 - bf
-    0,0,0,0,0,0,0,0,  # c0 - c7
-    0,0,0,0,0,0,0,0,  # c8 - cf
-    0,0,0,0,0,0,0,0,  # d0 - d7
-    0,0,0,0,0,0,0,0,  # d8 - df
-    0,0,0,0,0,0,0,0,  # e0 - e7
-    0,0,0,0,0,0,0,0,  # e8 - ef
-    0,0,0,0,0,0,0,0,  # f0 - f7
-    0,0,0,0,0,0,4,5   # f8 - ff
-)
-
-UCS2BE_ST  = (
-          5,     7,     7,MachineState.ERROR,     4,     3,MachineState.ERROR,MachineState.ERROR,#00-07
-     MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
-     MachineState.ITS_ME,MachineState.ITS_ME,     6,     6,     6,     6,MachineState.ERROR,MachineState.ERROR,#10-17
-          6,     6,     6,     6,     6,MachineState.ITS_ME,     6,     6,#18-1f
-          6,     6,     6,     6,     5,     7,     7,MachineState.ERROR,#20-27
-          5,     8,     6,     6,MachineState.ERROR,     6,     6,     6,#28-2f
-          6,     6,     6,     6,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START #30-37
-)
-
-UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2)
-
-UCS2BE_SM_MODEL = {'class_table': UCS2BE_CLS,
-                   'class_factor': 6,
-                   'state_table': UCS2BE_ST,
-                   'char_len_table': UCS2BE_CHAR_LEN_TABLE,
-                   'name': 'UTF-16BE'}
-
-# UCS2-LE
-
-UCS2LE_CLS = (
-    0,0,0,0,0,0,0,0,  # 00 - 07
-    0,0,1,0,0,2,0,0,  # 08 - 0f
-    0,0,0,0,0,0,0,0,  # 10 - 17
-    0,0,0,3,0,0,0,0,  # 18 - 1f
-    0,0,0,0,0,0,0,0,  # 20 - 27
-    0,3,3,3,3,3,0,0,  # 28 - 2f
-    0,0,0,0,0,0,0,0,  # 30 - 37
-    0,0,0,0,0,0,0,0,  # 38 - 3f
-    0,0,0,0,0,0,0,0,  # 40 - 47
-    0,0,0,0,0,0,0,0,  # 48 - 4f
-    0,0,0,0,0,0,0,0,  # 50 - 57
-    0,0,0,0,0,0,0,0,  # 58 - 5f
-    0,0,0,0,0,0,0,0,  # 60 - 67
-    0,0,0,0,0,0,0,0,  # 68 - 6f
-    0,0,0,0,0,0,0,0,  # 70 - 77
-    0,0,0,0,0,0,0,0,  # 78 - 7f
-    0,0,0,0,0,0,0,0,  # 80 - 87
-    0,0,0,0,0,0,0,0,  # 88 - 8f
-    0,0,0,0,0,0,0,0,  # 90 - 97
-    0,0,0,0,0,0,0,0,  # 98 - 9f
-    0,0,0,0,0,0,0,0,  # a0 - a7
-    0,0,0,0,0,0,0,0,  # a8 - af
-    0,0,0,0,0,0,0,0,  # b0 - b7
-    0,0,0,0,0,0,0,0,  # b8 - bf
-    0,0,0,0,0,0,0,0,  # c0 - c7
-    0,0,0,0,0,0,0,0,  # c8 - cf
-    0,0,0,0,0,0,0,0,  # d0 - d7
-    0,0,0,0,0,0,0,0,  # d8 - df
-    0,0,0,0,0,0,0,0,  # e0 - e7
-    0,0,0,0,0,0,0,0,  # e8 - ef
-    0,0,0,0,0,0,0,0,  # f0 - f7
-    0,0,0,0,0,0,4,5   # f8 - ff
-)
-
-UCS2LE_ST = (
-          6,     6,     7,     6,     4,     3,MachineState.ERROR,MachineState.ERROR,#00-07
-     MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#08-0f
-     MachineState.ITS_ME,MachineState.ITS_ME,     5,     5,     5,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,#10-17
-          5,     5,     5,MachineState.ERROR,     5,MachineState.ERROR,     6,     6,#18-1f
-          7,     6,     8,     8,     5,     5,     5,MachineState.ERROR,#20-27
-          5,     5,     5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     5,     5,#28-2f
-          5,     5,     5,MachineState.ERROR,     5,MachineState.ERROR,MachineState.START,MachineState.START #30-37
-)
-
-UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2)
-
-UCS2LE_SM_MODEL = {'class_table': UCS2LE_CLS,
-                 'class_factor': 6,
-                 'state_table': UCS2LE_ST,
-                 'char_len_table': UCS2LE_CHAR_LEN_TABLE,
-                 'name': 'UTF-16LE'}
-
-# UTF-8
-
-UTF8_CLS = (
-    1,1,1,1,1,1,1,1,  # 00 - 07  #allow 0x00 as a legal value
-    1,1,1,1,1,1,0,0,  # 08 - 0f
-    1,1,1,1,1,1,1,1,  # 10 - 17
-    1,1,1,0,1,1,1,1,  # 18 - 1f
-    1,1,1,1,1,1,1,1,  # 20 - 27
-    1,1,1,1,1,1,1,1,  # 28 - 2f
-    1,1,1,1,1,1,1,1,  # 30 - 37
-    1,1,1,1,1,1,1,1,  # 38 - 3f
-    1,1,1,1,1,1,1,1,  # 40 - 47
-    1,1,1,1,1,1,1,1,  # 48 - 4f
-    1,1,1,1,1,1,1,1,  # 50 - 57
-    1,1,1,1,1,1,1,1,  # 58 - 5f
-    1,1,1,1,1,1,1,1,  # 60 - 67
-    1,1,1,1,1,1,1,1,  # 68 - 6f
-    1,1,1,1,1,1,1,1,  # 70 - 77
-    1,1,1,1,1,1,1,1,  # 78 - 7f
-    2,2,2,2,3,3,3,3,  # 80 - 87
-    4,4,4,4,4,4,4,4,  # 88 - 8f
-    4,4,4,4,4,4,4,4,  # 90 - 97
-    4,4,4,4,4,4,4,4,  # 98 - 9f
-    5,5,5,5,5,5,5,5,  # a0 - a7
-    5,5,5,5,5,5,5,5,  # a8 - af
-    5,5,5,5,5,5,5,5,  # b0 - b7
-    5,5,5,5,5,5,5,5,  # b8 - bf
-    0,0,6,6,6,6,6,6,  # c0 - c7
-    6,6,6,6,6,6,6,6,  # c8 - cf
-    6,6,6,6,6,6,6,6,  # d0 - d7
-    6,6,6,6,6,6,6,6,  # d8 - df
-    7,8,8,8,8,8,8,8,  # e0 - e7
-    8,8,8,8,8,9,8,8,  # e8 - ef
-    10,11,11,11,11,11,11,11,  # f0 - f7
-    12,13,13,13,14,15,0,0    # f8 - ff
-)
-
-UTF8_ST = (
-    MachineState.ERROR,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     12,   10,#00-07
-         9,     11,     8,     7,     6,     5,     4,    3,#08-0f
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#10-17
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#18-1f
-    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#20-27
-    MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,#28-2f
-    MachineState.ERROR,MachineState.ERROR,     5,     5,     5,     5,MachineState.ERROR,MachineState.ERROR,#30-37
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#38-3f
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     5,     5,     5,MachineState.ERROR,MachineState.ERROR,#40-47
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#48-4f
-    MachineState.ERROR,MachineState.ERROR,     7,     7,     7,     7,MachineState.ERROR,MachineState.ERROR,#50-57
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#58-5f
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     7,     7,MachineState.ERROR,MachineState.ERROR,#60-67
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#68-6f
-    MachineState.ERROR,MachineState.ERROR,     9,     9,     9,     9,MachineState.ERROR,MachineState.ERROR,#70-77
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#78-7f
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,     9,MachineState.ERROR,MachineState.ERROR,#80-87
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#88-8f
-    MachineState.ERROR,MachineState.ERROR,    12,    12,    12,    12,MachineState.ERROR,MachineState.ERROR,#90-97
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#98-9f
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,    12,MachineState.ERROR,MachineState.ERROR,#a0-a7
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#a8-af
-    MachineState.ERROR,MachineState.ERROR,    12,    12,    12,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b0-b7
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,#b8-bf
-    MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,#c0-c7
-    MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR #c8-cf
-)
-
-UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6)
-
-UTF8_SM_MODEL = {'class_table': UTF8_CLS,
-                 'class_factor': 16,
-                 'state_table': UTF8_ST,
-                 'char_len_table': UTF8_CHAR_LEN_TABLE,
-                 'name': 'UTF-8'}
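
Each *_SM_MODEL above bundles three flat tables: class_table maps a byte value to a character class, state_table is a 2-D transition table flattened with class_factor columns per row, and char_len_table records the expected length of the multi-byte character each class starts. A minimal sketch of how chardet's CodingStateMachine walks these tables, using the UTF-8 model as an example (the input bytes below are made up):

from chardet.enums import MachineState
from chardet.mbcssm import UTF8_SM_MODEL

def probe(byte_str, model):
    state = MachineState.START
    for byte in byte_str:
        byte_class = model['class_table'][byte]
        # Flattened 2-D lookup: row = current state, column = byte class.
        state = model['state_table'][state * model['class_factor'] + byte_class]
        if state == MachineState.ERROR:
            return 'not this encoding'
        if state == MachineState.ITS_ME:
            return 'definitely this encoding'
    return 'still plausible'

print(probe(b'\xc3\xa9', UTF8_SM_MODEL))  # 'still plausible'   (valid 2-byte sequence)
print(probe(b'\xc3\x41', UTF8_SM_MODEL))  # 'not this encoding' (bad continuation byte)
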
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/sbcharsetprober.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/sbcharsetprober.py
deleted file mode 100644
index 0adb51d..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/sbcharsetprober.py
+++ /dev/null
@@ -1,132 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 2001
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#   Shy Shalom - original C code
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .charsetprober import CharSetProber
-from .enums import CharacterCategory, ProbingState, SequenceLikelihood
-
-
-class SingleByteCharSetProber(CharSetProber):
-    SAMPLE_SIZE = 64
-    SB_ENOUGH_REL_THRESHOLD = 1024  #  0.25 * SAMPLE_SIZE^2
-    POSITIVE_SHORTCUT_THRESHOLD = 0.95
-    NEGATIVE_SHORTCUT_THRESHOLD = 0.05
-
-    def __init__(self, model, reversed=False, name_prober=None):
-        super(SingleByteCharSetProber, self).__init__()
-        self._model = model
-        # TRUE if we need to reverse every pair in the model lookup
-        self._reversed = reversed
-        # Optional auxiliary prober for name decision
-        self._name_prober = name_prober
-        self._last_order = None
-        self._seq_counters = None
-        self._total_seqs = None
-        self._total_char = None
-        self._freq_char = None
-        self.reset()
-
-    def reset(self):
-        super(SingleByteCharSetProber, self).reset()
-        # char order of last character
-        self._last_order = 255
-        self._seq_counters = [0] * SequenceLikelihood.get_num_categories()
-        self._total_seqs = 0
-        self._total_char = 0
-        # characters that fall in our sampling range
-        self._freq_char = 0
-
-    @property
-    def charset_name(self):
-        if self._name_prober:
-            return self._name_prober.charset_name
-        else:
-            return self._model['charset_name']
-
-    @property
-    def language(self):
-        if self._name_prober:
-            return self._name_prober.language
-        else:
-            return self._model.get('language')
-
-    def feed(self, byte_str):
-        if not self._model['keep_english_letter']:
-            byte_str = self.filter_international_words(byte_str)
-        if not byte_str:
-            return self.state
-        char_to_order_map = self._model['char_to_order_map']
-        for i, c in enumerate(byte_str):
-            # XXX: Order is in range 1-64, so one would think we want 0-63 here,
-            #      but that leads to 27 more test failures than before.
-            order = char_to_order_map[c]
-            # XXX: This was SYMBOL_CAT_ORDER before, with a value of 250, but
-            #      CharacterCategory.SYMBOL is actually 253, so we use CONTROL
-            #      to make it closer to the original intent. The only difference
-            #      is whether or not we count digits and control characters for
-            #      _total_char purposes.
-            if order < CharacterCategory.CONTROL:
-                self._total_char += 1
-            if order < self.SAMPLE_SIZE:
-                self._freq_char += 1
-                if self._last_order < self.SAMPLE_SIZE:
-                    self._total_seqs += 1
-                    if not self._reversed:
-                        i = (self._last_order * self.SAMPLE_SIZE) + order
-                        model = self._model['precedence_matrix'][i]
-                    else:  # reverse the order of the letters in the lookup
-                        i = (order * self.SAMPLE_SIZE) + self._last_order
-                        model = self._model['precedence_matrix'][i]
-                    self._seq_counters[model] += 1
-            self._last_order = order
-
-        charset_name = self._model['charset_name']
-        if self.state == ProbingState.DETECTING:
-            if self._total_seqs > self.SB_ENOUGH_REL_THRESHOLD:
-                confidence = self.get_confidence()
-                if confidence > self.POSITIVE_SHORTCUT_THRESHOLD:
-                    self.logger.debug('%s confidence = %s, we have a winner',
-                                      charset_name, confidence)
-                    self._state = ProbingState.FOUND_IT
-                elif confidence < self.NEGATIVE_SHORTCUT_THRESHOLD:
-                    self.logger.debug('%s confidence = %s, below negative '
-                                      'shortcut threshold %s', charset_name,
-                                      confidence,
-                                      self.NEGATIVE_SHORTCUT_THRESHOLD)
-                    self._state = ProbingState.NOT_ME
-
-        return self.state
-
-    def get_confidence(self):
-        r = 0.01
-        if self._total_seqs > 0:
-            r = ((1.0 * self._seq_counters[SequenceLikelihood.POSITIVE]) /
-                 self._total_seqs / self._model['typical_positive_ratio'])
-            r = r * self._freq_char / self._total_char
-            if r >= 1.0:
-                r = 0.99
-        return r
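
The confidence above multiplies two ratios: the share of bigrams the language model rates POSITIVE, normalised by the model's typical_positive_ratio, and the share of characters that fell inside the 64-character sampling range. A standalone restatement with made-up sample statistics:

# Illustrative restatement of SingleByteCharSetProber.get_confidence().
def sbcs_confidence(positive_seqs, total_seqs, typical_positive_ratio,
                    freq_char, total_char):
    r = 0.01
    if total_seqs > 0:
        # How "positive" the observed bigrams look relative to typical text
        # in this language/encoding ...
        r = (1.0 * positive_seqs) / total_seqs / typical_positive_ratio
        # ... damped by how much of the input used the sampled characters.
        r = r * freq_char / total_char
        if r >= 1.0:
            r = 0.99
    return r

# 70% positive bigrams, model's typical ratio 0.95, 90% sampled characters:
print(sbcs_confidence(700, 1000, 0.95, 900, 1000))   # ~0.66
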
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/sbcsgroupprober.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/sbcsgroupprober.py
deleted file mode 100644
index 98e95dc..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/sbcsgroupprober.py
+++ /dev/null
@@ -1,73 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 2001
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#   Shy Shalom - original C code
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .charsetgroupprober import CharSetGroupProber
-from .sbcharsetprober import SingleByteCharSetProber
-from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel,
-                                Latin5CyrillicModel, MacCyrillicModel,
-                                Ibm866Model, Ibm855Model)
-from .langgreekmodel import Latin7GreekModel, Win1253GreekModel
-from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel
-# from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel
-from .langthaimodel import TIS620ThaiModel
-from .langhebrewmodel import Win1255HebrewModel
-from .hebrewprober import HebrewProber
-from .langturkishmodel import Latin5TurkishModel
-
-
-class SBCSGroupProber(CharSetGroupProber):
-    def __init__(self):
-        super(SBCSGroupProber, self).__init__()
-        self.probers = [
-            SingleByteCharSetProber(Win1251CyrillicModel),
-            SingleByteCharSetProber(Koi8rModel),
-            SingleByteCharSetProber(Latin5CyrillicModel),
-            SingleByteCharSetProber(MacCyrillicModel),
-            SingleByteCharSetProber(Ibm866Model),
-            SingleByteCharSetProber(Ibm855Model),
-            SingleByteCharSetProber(Latin7GreekModel),
-            SingleByteCharSetProber(Win1253GreekModel),
-            SingleByteCharSetProber(Latin5BulgarianModel),
-            SingleByteCharSetProber(Win1251BulgarianModel),
-            # TODO: Restore Hungarian encodings (iso-8859-2 and windows-1250)
-            #       after we retrain model.
-            # SingleByteCharSetProber(Latin2HungarianModel),
-            # SingleByteCharSetProber(Win1250HungarianModel),
-            SingleByteCharSetProber(TIS620ThaiModel),
-            SingleByteCharSetProber(Latin5TurkishModel),
-        ]
-        hebrew_prober = HebrewProber()
-        logical_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel,
-                                                        False, hebrew_prober)
-        visual_hebrew_prober = SingleByteCharSetProber(Win1255HebrewModel, True,
-                                                       hebrew_prober)
-        hebrew_prober.set_model_probers(logical_hebrew_prober, visual_hebrew_prober)
-        self.probers.extend([hebrew_prober, logical_hebrew_prober,
-                             visual_hebrew_prober])
-
-        self.reset()
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/sjisprober.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/sjisprober.py
deleted file mode 100644
index 9e29623..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/sjisprober.py
+++ /dev/null
@@ -1,92 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .mbcharsetprober import MultiByteCharSetProber
-from .codingstatemachine import CodingStateMachine
-from .chardistribution import SJISDistributionAnalysis
-from .jpcntx import SJISContextAnalysis
-from .mbcssm import SJIS_SM_MODEL
-from .enums import ProbingState, MachineState
-
-
-class SJISProber(MultiByteCharSetProber):
-    def __init__(self):
-        super(SJISProber, self).__init__()
-        self.coding_sm = CodingStateMachine(SJIS_SM_MODEL)
-        self.distribution_analyzer = SJISDistributionAnalysis()
-        self.context_analyzer = SJISContextAnalysis()
-        self.reset()
-
-    def reset(self):
-        super(SJISProber, self).reset()
-        self.context_analyzer.reset()
-
-    @property
-    def charset_name(self):
-        return self.context_analyzer.charset_name
-
-    @property
-    def language(self):
-        return "Japanese"
-
-    def feed(self, byte_str):
-        for i in range(len(byte_str)):
-            coding_state = self.coding_sm.next_state(byte_str[i])
-            if coding_state == MachineState.ERROR:
-                self.logger.debug('%s %s prober hit error at byte %s',
-                                  self.charset_name, self.language, i)
-                self._state = ProbingState.NOT_ME
-                break
-            elif coding_state == MachineState.ITS_ME:
-                self._state = ProbingState.FOUND_IT
-                break
-            elif coding_state == MachineState.START:
-                char_len = self.coding_sm.get_current_charlen()
-                if i == 0:
-                    self._last_char[1] = byte_str[0]
-                    self.context_analyzer.feed(self._last_char[2 - char_len:],
-                                               char_len)
-                    self.distribution_analyzer.feed(self._last_char, char_len)
-                else:
-                    self.context_analyzer.feed(byte_str[i + 1 - char_len:i + 3
-                                                        - char_len], char_len)
-                    self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
-                                                    char_len)
-
-        self._last_char[0] = byte_str[-1]
-
-        if self.state == ProbingState.DETECTING:
-            if (self.context_analyzer.got_enough_data() and
-               (self.get_confidence() > self.SHORTCUT_THRESHOLD)):
-                self._state = ProbingState.FOUND_IT
-
-        return self.state
-
-    def get_confidence(self):
-        context_conf = self.context_analyzer.get_confidence()
-        distrib_conf = self.distribution_analyzer.get_confidence()
-        return max(context_conf, distrib_conf)
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/universaldetector.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/universaldetector.py
deleted file mode 100644
index 7b4e92d..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/universaldetector.py
+++ /dev/null
@@ -1,286 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is Mozilla Universal charset detector code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 2001
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#   Shy Shalom - original C code
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-"""
-Module containing the UniversalDetector detector class, which is the primary
-class a user of ``chardet`` should use.
-
-:author: Mark Pilgrim (initial port to Python)
-:author: Shy Shalom (original C code)
-:author: Dan Blanchard (major refactoring for 3.0)
-:author: Ian Cordasco
-"""
-
-
-import codecs
-import logging
-import re
-
-from .charsetgroupprober import CharSetGroupProber
-from .enums import InputState, LanguageFilter, ProbingState
-from .escprober import EscCharSetProber
-from .latin1prober import Latin1Prober
-from .mbcsgroupprober import MBCSGroupProber
-from .sbcsgroupprober import SBCSGroupProber
-
-
-class UniversalDetector(object):
-    """
-    The ``UniversalDetector`` class underlies the ``chardet.detect`` function
-    and coordinates all of the different charset probers.
-
-    To get a ``dict`` containing an encoding and its confidence, you can simply
-    run:
-
-    .. code::
-
-            u = UniversalDetector()
-            u.feed(some_bytes)
-            u.close()
-            detected = u.result
-
-    """
-
-    MINIMUM_THRESHOLD = 0.20
-    HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]')
-    ESC_DETECTOR = re.compile(b'(\033|~{)')
-    WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]')
-    ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252',
-                   'iso-8859-2': 'Windows-1250',
-                   'iso-8859-5': 'Windows-1251',
-                   'iso-8859-6': 'Windows-1256',
-                   'iso-8859-7': 'Windows-1253',
-                   'iso-8859-8': 'Windows-1255',
-                   'iso-8859-9': 'Windows-1254',
-                   'iso-8859-13': 'Windows-1257'}
-
-    def __init__(self, lang_filter=LanguageFilter.ALL):
-        self._esc_charset_prober = None
-        self._charset_probers = []
-        self.result = None
-        self.done = None
-        self._got_data = None
-        self._input_state = None
-        self._last_char = None
-        self.lang_filter = lang_filter
-        self.logger = logging.getLogger(__name__)
-        self._has_win_bytes = None
-        self.reset()
-
-    def reset(self):
-        """
-        Reset the UniversalDetector and all of its probers back to their
-        initial states.  This is called by ``__init__``, so you only need to
-        call this directly in between analyses of different documents.
-        """
-        self.result = {'encoding': None, 'confidence': 0.0, 'language': None}
-        self.done = False
-        self._got_data = False
-        self._has_win_bytes = False
-        self._input_state = InputState.PURE_ASCII
-        self._last_char = b''
-        if self._esc_charset_prober:
-            self._esc_charset_prober.reset()
-        for prober in self._charset_probers:
-            prober.reset()
-
-    def feed(self, byte_str):
-        """
-        Takes a chunk of a document and feeds it through all of the relevant
-        charset probers.
-
-        After calling ``feed``, you can check the value of the ``done``
-        attribute to see if you need to continue feeding the
-        ``UniversalDetector`` more data, or if it has made a prediction
-        (in the ``result`` attribute).
-
-        .. note::
-           You should always call ``close`` when you're done feeding in your
-           document if ``done`` is not already ``True``.
-        """
-        if self.done:
-            return
-
-        if not len(byte_str):
-            return
-
-        if not isinstance(byte_str, bytearray):
-            byte_str = bytearray(byte_str)
-
-        # First check for known BOMs, since these are guaranteed to be correct
-        if not self._got_data:
-            # If the data starts with BOM, we know it is UTF
-            if byte_str.startswith(codecs.BOM_UTF8):
-                # EF BB BF  UTF-8 with BOM
-                self.result = {'encoding': "UTF-8-SIG",
-                               'confidence': 1.0,
-                               'language': ''}
-            elif byte_str.startswith((codecs.BOM_UTF32_LE,
-                                      codecs.BOM_UTF32_BE)):
-                # FF FE 00 00  UTF-32, little-endian BOM
-                # 00 00 FE FF  UTF-32, big-endian BOM
-                self.result = {'encoding': "UTF-32",
-                               'confidence': 1.0,
-                               'language': ''}
-            elif byte_str.startswith(b'\xFE\xFF\x00\x00'):
-                # FE FF 00 00  UCS-4, unusual octet order BOM (3412)
-                self.result = {'encoding': "X-ISO-10646-UCS-4-3412",
-                               'confidence': 1.0,
-                               'language': ''}
-            elif byte_str.startswith(b'\x00\x00\xFF\xFE'):
-                # 00 00 FF FE  UCS-4, unusual octet order BOM (2143)
-                self.result = {'encoding': "X-ISO-10646-UCS-4-2143",
-                               'confidence': 1.0,
-                               'language': ''}
-            elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)):
-                # FF FE  UTF-16, little endian BOM
-                # FE FF  UTF-16, big endian BOM
-                self.result = {'encoding': "UTF-16",
-                               'confidence': 1.0,
-                               'language': ''}
-
-            self._got_data = True
-            if self.result['encoding'] is not None:
-                self.done = True
-                return
-
-        # If none of those matched and we've only seen ASCII so far, check
-        # for high bytes and escape sequences
-        if self._input_state == InputState.PURE_ASCII:
-            if self.HIGH_BYTE_DETECTOR.search(byte_str):
-                self._input_state = InputState.HIGH_BYTE
-            elif self._input_state == InputState.PURE_ASCII and \
-                    self.ESC_DETECTOR.search(self._last_char + byte_str):
-                self._input_state = InputState.ESC_ASCII
-
-        self._last_char = byte_str[-1:]
-
-        # If we've seen escape sequences, use the EscCharSetProber, which
-        # uses a simple state machine to check for known escape sequences in
-        # HZ and ISO-2022 encodings, since those are the only encodings that
-        # use such sequences.
-        if self._input_state == InputState.ESC_ASCII:
-            if not self._esc_charset_prober:
-                self._esc_charset_prober = EscCharSetProber(self.lang_filter)
-            if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT:
-                self.result = {'encoding':
-                               self._esc_charset_prober.charset_name,
-                               'confidence':
-                               self._esc_charset_prober.get_confidence(),
-                               'language':
-                               self._esc_charset_prober.language}
-                self.done = True
-        # If we've seen high bytes (i.e., those with values greater than 127),
-        # we need to do more complicated checks using all our multi-byte and
-        # single-byte probers that are left.  The single-byte probers
-        # use character bigram distributions to determine the encoding, whereas
-        # the multi-byte probers use a combination of character unigram and
-        # bigram distributions.
-        elif self._input_state == InputState.HIGH_BYTE:
-            if not self._charset_probers:
-                self._charset_probers = [MBCSGroupProber(self.lang_filter)]
-                # If we're checking non-CJK encodings, use single-byte prober
-                if self.lang_filter & LanguageFilter.NON_CJK:
-                    self._charset_probers.append(SBCSGroupProber())
-                self._charset_probers.append(Latin1Prober())
-            for prober in self._charset_probers:
-                if prober.feed(byte_str) == ProbingState.FOUND_IT:
-                    self.result = {'encoding': prober.charset_name,
-                                   'confidence': prober.get_confidence(),
-                                   'language': prober.language}
-                    self.done = True
-                    break
-            if self.WIN_BYTE_DETECTOR.search(byte_str):
-                self._has_win_bytes = True
-
-    def close(self):
-        """
-        Stop analyzing the current document and come up with a final
-        prediction.
-
-        :returns:  The ``result`` attribute, a ``dict`` with the keys
-                   `encoding`, `confidence`, and `language`.
-        """
-        # Don't bother with checks if we're already done
-        if self.done:
-            return self.result
-        self.done = True
-
-        if not self._got_data:
-            self.logger.debug('no data received!')
-
-        # Default to ASCII if it is all we've seen so far
-        elif self._input_state == InputState.PURE_ASCII:
-            self.result = {'encoding': 'ascii',
-                           'confidence': 1.0,
-                           'language': ''}
-
-        # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD
-        elif self._input_state == InputState.HIGH_BYTE:
-            prober_confidence = None
-            max_prober_confidence = 0.0
-            max_prober = None
-            for prober in self._charset_probers:
-                if not prober:
-                    continue
-                prober_confidence = prober.get_confidence()
-                if prober_confidence > max_prober_confidence:
-                    max_prober_confidence = prober_confidence
-                    max_prober = prober
-            if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD):
-                charset_name = max_prober.charset_name
-                lower_charset_name = max_prober.charset_name.lower()
-                confidence = max_prober.get_confidence()
-                # Use Windows encoding name instead of ISO-8859 if we saw any
-                # extra Windows-specific bytes
-                if lower_charset_name.startswith('iso-8859'):
-                    if self._has_win_bytes:
-                        charset_name = self.ISO_WIN_MAP.get(lower_charset_name,
-                                                            charset_name)
-                self.result = {'encoding': charset_name,
-                               'confidence': confidence,
-                               'language': max_prober.language}
-
-        # Log all prober confidences if none met MINIMUM_THRESHOLD
-        if self.logger.getEffectiveLevel() == logging.DEBUG:
-            if self.result['encoding'] is None:
-                self.logger.debug('no probers hit minimum threshold')
-                for group_prober in self._charset_probers:
-                    if not group_prober:
-                        continue
-                    if isinstance(group_prober, CharSetGroupProber):
-                        for prober in group_prober.probers:
-                            self.logger.debug('%s %s confidence = %s',
-                                              prober.charset_name,
-                                              prober.language,
-                                              prober.get_confidence())
-                    else:
-                        self.logger.debug('%s %s confidence = %s',
-                                          group_prober.charset_name,
-                                          group_prober.language,
-                                          group_prober.get_confidence())
-        return self.result
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/utf8prober.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/utf8prober.py
deleted file mode 100644
index 6c3196c..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/utf8prober.py
+++ /dev/null
@@ -1,82 +0,0 @@
-######################## BEGIN LICENSE BLOCK ########################
-# The Original Code is mozilla.org code.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1998
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#   Mark Pilgrim - port to Python
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
-# 02110-1301  USA
-######################### END LICENSE BLOCK #########################
-
-from .charsetprober import CharSetProber
-from .enums import ProbingState, MachineState
-from .codingstatemachine import CodingStateMachine
-from .mbcssm import UTF8_SM_MODEL
-
-
-
-class UTF8Prober(CharSetProber):
-    ONE_CHAR_PROB = 0.5
-
-    def __init__(self):
-        super(UTF8Prober, self).__init__()
-        self.coding_sm = CodingStateMachine(UTF8_SM_MODEL)
-        self._num_mb_chars = None
-        self.reset()
-
-    def reset(self):
-        super(UTF8Prober, self).reset()
-        self.coding_sm.reset()
-        self._num_mb_chars = 0
-
-    @property
-    def charset_name(self):
-        return "utf-8"
-
-    @property
-    def language(self):
-        return ""
-
-    def feed(self, byte_str):
-        for c in byte_str:
-            coding_state = self.coding_sm.next_state(c)
-            if coding_state == MachineState.ERROR:
-                self._state = ProbingState.NOT_ME
-                break
-            elif coding_state == MachineState.ITS_ME:
-                self._state = ProbingState.FOUND_IT
-                break
-            elif coding_state == MachineState.START:
-                if self.coding_sm.get_current_charlen() >= 2:
-                    self._num_mb_chars += 1
-
-        if self.state == ProbingState.DETECTING:
-            if self.get_confidence() > self.SHORTCUT_THRESHOLD:
-                self._state = ProbingState.FOUND_IT
-
-        return self.state
-
-    def get_confidence(self):
-        unlike = 0.99
-        if self._num_mb_chars < 6:
-            unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars
-            return 1.0 - unlike
-        else:
-            return unlike
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/version.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/version.py
deleted file mode 100644
index bb2a34a..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/chardet/version.py
+++ /dev/null
@@ -1,9 +0,0 @@
-"""
-This module exists only to simplify retrieving the version number of chardet
-from within setup.py and from chardet subpackages.
-
-:author: Dan Blanchard (dan.blanchard@gmail.com)
-"""
-
-__version__ = "3.0.4"
-VERSION = __version__.split('.')
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/colorama/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/colorama/__init__.py
deleted file mode 100644
index 2a3bf47..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/colorama/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
-from .initialise import init, deinit, reinit, colorama_text
-from .ansi import Fore, Back, Style, Cursor
-from .ansitowin32 import AnsiToWin32
-
-__version__ = '0.4.1'
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/colorama/ansi.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/colorama/ansi.py
deleted file mode 100644
index 7877658..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/colorama/ansi.py
+++ /dev/null
@@ -1,102 +0,0 @@
-# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
-'''
-This module generates ANSI character codes for printing colors to terminals.
-See: http://en.wikipedia.org/wiki/ANSI_escape_code
-'''
-
-CSI = '\033['
-OSC = '\033]'
-BEL = '\007'
-
-
-def code_to_chars(code):
-    return CSI + str(code) + 'm'
-
-def set_title(title):
-    return OSC + '2;' + title + BEL
-
-def clear_screen(mode=2):
-    return CSI + str(mode) + 'J'
-
-def clear_line(mode=2):
-    return CSI + str(mode) + 'K'
-
-
-class AnsiCodes(object):
-    def __init__(self):
-        # the subclasses declare class attributes which are numbers.
-        # Upon instantiation we define instance attributes, which are the same
-        # as the class attributes but wrapped with the ANSI escape sequence
-        for name in dir(self):
-            if not name.startswith('_'):
-                value = getattr(self, name)
-                setattr(self, name, code_to_chars(value))
-
-
-class AnsiCursor(object):
-    def UP(self, n=1):
-        return CSI + str(n) + 'A'
-    def DOWN(self, n=1):
-        return CSI + str(n) + 'B'
-    def FORWARD(self, n=1):
-        return CSI + str(n) + 'C'
-    def BACK(self, n=1):
-        return CSI + str(n) + 'D'
-    def POS(self, x=1, y=1):
-        return CSI + str(y) + ';' + str(x) + 'H'
-
-
-class AnsiFore(AnsiCodes):
-    BLACK           = 30
-    RED             = 31
-    GREEN           = 32
-    YELLOW          = 33
-    BLUE            = 34
-    MAGENTA         = 35
-    CYAN            = 36
-    WHITE           = 37
-    RESET           = 39
-
-    # These are fairly well supported, but not part of the standard.
-    LIGHTBLACK_EX   = 90
-    LIGHTRED_EX     = 91
-    LIGHTGREEN_EX   = 92
-    LIGHTYELLOW_EX  = 93
-    LIGHTBLUE_EX    = 94
-    LIGHTMAGENTA_EX = 95
-    LIGHTCYAN_EX    = 96
-    LIGHTWHITE_EX   = 97
-
-
-class AnsiBack(AnsiCodes):
-    BLACK           = 40
-    RED             = 41
-    GREEN           = 42
-    YELLOW          = 43
-    BLUE            = 44
-    MAGENTA         = 45
-    CYAN            = 46
-    WHITE           = 47
-    RESET           = 49
-
-    # These are fairly well supported, but not part of the standard.
-    LIGHTBLACK_EX   = 100
-    LIGHTRED_EX     = 101
-    LIGHTGREEN_EX   = 102
-    LIGHTYELLOW_EX  = 103
-    LIGHTBLUE_EX    = 104
-    LIGHTMAGENTA_EX = 105
-    LIGHTCYAN_EX    = 106
-    LIGHTWHITE_EX   = 107
-
-
-class AnsiStyle(AnsiCodes):
-    BRIGHT    = 1
-    DIM       = 2
-    NORMAL    = 22
-    RESET_ALL = 0
-
-Fore   = AnsiFore()
-Back   = AnsiBack()
-Style  = AnsiStyle()
-Cursor = AnsiCursor()
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/colorama/ansitowin32.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/colorama/ansitowin32.py
deleted file mode 100644
index 359c92b..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/colorama/ansitowin32.py
+++ /dev/null
@@ -1,257 +0,0 @@
-# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
-import re
-import sys
-import os
-
-from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style
-from .winterm import WinTerm, WinColor, WinStyle
-from .win32 import windll, winapi_test
-
-
-winterm = None
-if windll is not None:
-    winterm = WinTerm()
-
-
-class StreamWrapper(object):
-    '''
-    Wraps a stream (such as stdout), acting as a transparent proxy for all
-    attribute access apart from method 'write()', which is delegated to our
-    Converter instance.
-    '''
-    def __init__(self, wrapped, converter):
-        # double-underscore everything to prevent clashes with names of
-        # attributes on the wrapped stream object.
-        self.__wrapped = wrapped
-        self.__convertor = converter
-
-    def __getattr__(self, name):
-        return getattr(self.__wrapped, name)
-
-    def __enter__(self, *args, **kwargs):
-        # special method lookup bypasses __getattr__/__getattribute__, see
-        # https://stackoverflow.com/questions/12632894/why-doesnt-getattr-work-with-exit
-        # thus, contextlib magic methods are not proxied via __getattr__
-        return self.__wrapped.__enter__(*args, **kwargs)
-
-    def __exit__(self, *args, **kwargs):
-        return self.__wrapped.__exit__(*args, **kwargs)
-
-    def write(self, text):
-        self.__convertor.write(text)
-
-    def isatty(self):
-        stream = self.__wrapped
-        if 'PYCHARM_HOSTED' in os.environ:
-            if stream is not None and (stream is sys.__stdout__ or stream is sys.__stderr__):
-                return True
-        try:
-            stream_isatty = stream.isatty
-        except AttributeError:
-            return False
-        else:
-            return stream_isatty()
-
-    @property
-    def closed(self):
-        stream = self.__wrapped
-        try:
-            return stream.closed
-        except AttributeError:
-            return True
-
-
-class AnsiToWin32(object):
-    '''
-    Implements a 'write()' method which, on Windows, will strip ANSI character
-    sequences from the text, and if outputting to a tty, will convert them into
-    win32 function calls.
-    '''
-    ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?')   # Control Sequence Introducer
-    ANSI_OSC_RE = re.compile('\001?\033\\]((?:.|;)*?)(\x07)\002?')        # Operating System Command
-
-    def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
-        # The wrapped stream (normally sys.stdout or sys.stderr)
-        self.wrapped = wrapped
-
-        # should we reset colors to defaults after every .write()
-        self.autoreset = autoreset
-
-        # create the proxy wrapping our output stream
-        self.stream = StreamWrapper(wrapped, self)
-
-        on_windows = os.name == 'nt'
-        # We test if the WinAPI works, because even if we are on Windows
-        # we may be using a terminal that doesn't support the WinAPI
-        # (e.g. Cygwin Terminal). In this case it's up to the terminal
-        # to support the ANSI codes.
-        conversion_supported = on_windows and winapi_test()
-
-        # should we strip ANSI sequences from our output?
-        if strip is None:
-            strip = conversion_supported or (not self.stream.closed and not self.stream.isatty())
-        self.strip = strip
-
-        # should we convert ANSI sequences into win32 calls?
-        if convert is None:
-            convert = conversion_supported and not self.stream.closed and self.stream.isatty()
-        self.convert = convert
-
-        # dict of ansi codes to win32 functions and parameters
-        self.win32_calls = self.get_win32_calls()
-
-        # are we wrapping stderr?
-        self.on_stderr = self.wrapped is sys.stderr
-
-    def should_wrap(self):
-        '''
-        True if this class is actually needed. If false, then the output
-        stream will not be affected, nor will win32 calls be issued, so
-        wrapping stdout is not actually required. This will generally be
-        False on non-Windows platforms, unless optional functionality like
-        autoreset has been requested using kwargs to init()
-        '''
-        return self.convert or self.strip or self.autoreset
-
-    def get_win32_calls(self):
-        if self.convert and winterm:
-            return {
-                AnsiStyle.RESET_ALL: (winterm.reset_all, ),
-                AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT),
-                AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL),
-                AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL),
-                AnsiFore.BLACK: (winterm.fore, WinColor.BLACK),
-                AnsiFore.RED: (winterm.fore, WinColor.RED),
-                AnsiFore.GREEN: (winterm.fore, WinColor.GREEN),
-                AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW),
-                AnsiFore.BLUE: (winterm.fore, WinColor.BLUE),
-                AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA),
-                AnsiFore.CYAN: (winterm.fore, WinColor.CYAN),
-                AnsiFore.WHITE: (winterm.fore, WinColor.GREY),
-                AnsiFore.RESET: (winterm.fore, ),
-                AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True),
-                AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True),
-                AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True),
-                AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True),
-                AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True),
-                AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True),
-                AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True),
-                AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True),
-                AnsiBack.BLACK: (winterm.back, WinColor.BLACK),
-                AnsiBack.RED: (winterm.back, WinColor.RED),
-                AnsiBack.GREEN: (winterm.back, WinColor.GREEN),
-                AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW),
-                AnsiBack.BLUE: (winterm.back, WinColor.BLUE),
-                AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA),
-                AnsiBack.CYAN: (winterm.back, WinColor.CYAN),
-                AnsiBack.WHITE: (winterm.back, WinColor.GREY),
-                AnsiBack.RESET: (winterm.back, ),
-                AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True),
-                AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True),
-                AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True),
-                AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True),
-                AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True),
-                AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True),
-                AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True),
-                AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True),
-            }
-        return dict()
-
-    def write(self, text):
-        if self.strip or self.convert:
-            self.write_and_convert(text)
-        else:
-            self.wrapped.write(text)
-            self.wrapped.flush()
-        if self.autoreset:
-            self.reset_all()
-
-
-    def reset_all(self):
-        if self.convert:
-            self.call_win32('m', (0,))
-        elif not self.strip and not self.stream.closed:
-            self.wrapped.write(Style.RESET_ALL)
-
-
-    def write_and_convert(self, text):
-        '''
-        Write the given text to our wrapped stream, stripping any ANSI
-        sequences from the text, and optionally converting them into win32
-        calls.
-        '''
-        cursor = 0
-        text = self.convert_osc(text)
-        for match in self.ANSI_CSI_RE.finditer(text):
-            start, end = match.span()
-            self.write_plain_text(text, cursor, start)
-            self.convert_ansi(*match.groups())
-            cursor = end
-        self.write_plain_text(text, cursor, len(text))
-
-
-    def write_plain_text(self, text, start, end):
-        if start < end:
-            self.wrapped.write(text[start:end])
-            self.wrapped.flush()
-
-
-    def convert_ansi(self, paramstring, command):
-        if self.convert:
-            params = self.extract_params(command, paramstring)
-            self.call_win32(command, params)
-
-
-    def extract_params(self, command, paramstring):
-        if command in 'Hf':
-            params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';'))
-            while len(params) < 2:
-                # defaults:
-                params = params + (1,)
-        else:
-            params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0)
-            if len(params) == 0:
-                # defaults:
-                if command in 'JKm':
-                    params = (0,)
-                elif command in 'ABCD':
-                    params = (1,)
-
-        return params
-
-
-    def call_win32(self, command, params):
-        if command == 'm':
-            for param in params:
-                if param in self.win32_calls:
-                    func_args = self.win32_calls[param]
-                    func = func_args[0]
-                    args = func_args[1:]
-                    kwargs = dict(on_stderr=self.on_stderr)
-                    func(*args, **kwargs)
-        elif command in 'J':
-            winterm.erase_screen(params[0], on_stderr=self.on_stderr)
-        elif command in 'K':
-            winterm.erase_line(params[0], on_stderr=self.on_stderr)
-        elif command in 'Hf':     # cursor position - absolute
-            winterm.set_cursor_position(params, on_stderr=self.on_stderr)
-        elif command in 'ABCD':   # cursor position - relative
-            n = params[0]
-            # A - up, B - down, C - forward, D - back
-            x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command]
-            winterm.cursor_adjust(x, y, on_stderr=self.on_stderr)
-
-
-    def convert_osc(self, text):
-        for match in self.ANSI_OSC_RE.finditer(text):
-            start, end = match.span()
-            text = text[:start] + text[end:]
-            paramstring, command = match.groups()
-            if command in '\x07':       # \x07 = BEL
-                params = paramstring.split(";")
-                # 0 - change title and icon (we will only change title)
-                # 1 - change icon (we don't support this)
-                # 2 - change title
-                if params[0] in '02':
-                    winterm.set_title(params[1])
-        return text
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/colorama/initialise.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/colorama/initialise.py
deleted file mode 100644
index 430d066..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/colorama/initialise.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
-import atexit
-import contextlib
-import sys
-
-from .ansitowin32 import AnsiToWin32
-
-
-orig_stdout = None
-orig_stderr = None
-
-wrapped_stdout = None
-wrapped_stderr = None
-
-atexit_done = False
-
-
-def reset_all():
-    if AnsiToWin32 is not None:    # Issue #74: objects might become None at exit
-        AnsiToWin32(orig_stdout).reset_all()
-
-
-def init(autoreset=False, convert=None, strip=None, wrap=True):
-
-    if not wrap and any([autoreset, convert, strip]):
-        raise ValueError('wrap=False conflicts with any other arg=True')
-
-    global wrapped_stdout, wrapped_stderr
-    global orig_stdout, orig_stderr
-
-    orig_stdout = sys.stdout
-    orig_stderr = sys.stderr
-
-    if sys.stdout is None:
-        wrapped_stdout = None
-    else:
-        sys.stdout = wrapped_stdout = \
-            wrap_stream(orig_stdout, convert, strip, autoreset, wrap)
-    if sys.stderr is None:
-        wrapped_stderr = None
-    else:
-        sys.stderr = wrapped_stderr = \
-            wrap_stream(orig_stderr, convert, strip, autoreset, wrap)
-
-    global atexit_done
-    if not atexit_done:
-        atexit.register(reset_all)
-        atexit_done = True
-
-
-def deinit():
-    if orig_stdout is not None:
-        sys.stdout = orig_stdout
-    if orig_stderr is not None:
-        sys.stderr = orig_stderr
-
-
-@contextlib.contextmanager
-def colorama_text(*args, **kwargs):
-    init(*args, **kwargs)
-    try:
-        yield
-    finally:
-        deinit()
-
-
-def reinit():
-    if wrapped_stdout is not None:
-        sys.stdout = wrapped_stdout
-    if wrapped_stderr is not None:
-        sys.stderr = wrapped_stderr
-
-
-def wrap_stream(stream, convert, strip, autoreset, wrap):
-    if wrap:
-        wrapper = AnsiToWin32(stream,
-            convert=convert, strip=strip, autoreset=autoreset)
-        if wrapper.should_wrap():
-            stream = wrapper.stream
-    return stream
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/colorama/win32.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/colorama/win32.py
deleted file mode 100644
index c2d8360..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/colorama/win32.py
+++ /dev/null
@@ -1,152 +0,0 @@
-# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
-
-# from winbase.h
-STDOUT = -11
-STDERR = -12
-
-try:
-    import ctypes
-    from ctypes import LibraryLoader
-    windll = LibraryLoader(ctypes.WinDLL)
-    from ctypes import wintypes
-except (AttributeError, ImportError):
-    windll = None
-    SetConsoleTextAttribute = lambda *_: None
-    winapi_test = lambda *_: None
-else:
-    from ctypes import byref, Structure, c_char, POINTER
-
-    COORD = wintypes._COORD
-
-    class CONSOLE_SCREEN_BUFFER_INFO(Structure):
-        """struct in wincon.h."""
-        _fields_ = [
-            ("dwSize", COORD),
-            ("dwCursorPosition", COORD),
-            ("wAttributes", wintypes.WORD),
-            ("srWindow", wintypes.SMALL_RECT),
-            ("dwMaximumWindowSize", COORD),
-        ]
-        def __str__(self):
-            return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % (
-                self.dwSize.Y, self.dwSize.X
-                , self.dwCursorPosition.Y, self.dwCursorPosition.X
-                , self.wAttributes
-                , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right
-                , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X
-            )
-
-    _GetStdHandle = windll.kernel32.GetStdHandle
-    _GetStdHandle.argtypes = [
-        wintypes.DWORD,
-    ]
-    _GetStdHandle.restype = wintypes.HANDLE
-
-    _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo
-    _GetConsoleScreenBufferInfo.argtypes = [
-        wintypes.HANDLE,
-        POINTER(CONSOLE_SCREEN_BUFFER_INFO),
-    ]
-    _GetConsoleScreenBufferInfo.restype = wintypes.BOOL
-
-    _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute
-    _SetConsoleTextAttribute.argtypes = [
-        wintypes.HANDLE,
-        wintypes.WORD,
-    ]
-    _SetConsoleTextAttribute.restype = wintypes.BOOL
-
-    _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition
-    _SetConsoleCursorPosition.argtypes = [
-        wintypes.HANDLE,
-        COORD,
-    ]
-    _SetConsoleCursorPosition.restype = wintypes.BOOL
-
-    _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA
-    _FillConsoleOutputCharacterA.argtypes = [
-        wintypes.HANDLE,
-        c_char,
-        wintypes.DWORD,
-        COORD,
-        POINTER(wintypes.DWORD),
-    ]
-    _FillConsoleOutputCharacterA.restype = wintypes.BOOL
-
-    _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute
-    _FillConsoleOutputAttribute.argtypes = [
-        wintypes.HANDLE,
-        wintypes.WORD,
-        wintypes.DWORD,
-        COORD,
-        POINTER(wintypes.DWORD),
-    ]
-    _FillConsoleOutputAttribute.restype = wintypes.BOOL
-
-    _SetConsoleTitleW = windll.kernel32.SetConsoleTitleW
-    _SetConsoleTitleW.argtypes = [
-        wintypes.LPCWSTR
-    ]
-    _SetConsoleTitleW.restype = wintypes.BOOL
-
-    def _winapi_test(handle):
-        csbi = CONSOLE_SCREEN_BUFFER_INFO()
-        success = _GetConsoleScreenBufferInfo(
-            handle, byref(csbi))
-        return bool(success)
-
-    def winapi_test():
-        return any(_winapi_test(h) for h in
-                   (_GetStdHandle(STDOUT), _GetStdHandle(STDERR)))
-
-    def GetConsoleScreenBufferInfo(stream_id=STDOUT):
-        handle = _GetStdHandle(stream_id)
-        csbi = CONSOLE_SCREEN_BUFFER_INFO()
-        success = _GetConsoleScreenBufferInfo(
-            handle, byref(csbi))
-        return csbi
-
-    def SetConsoleTextAttribute(stream_id, attrs):
-        handle = _GetStdHandle(stream_id)
-        return _SetConsoleTextAttribute(handle, attrs)
-
-    def SetConsoleCursorPosition(stream_id, position, adjust=True):
-        position = COORD(*position)
-        # If the position is out of range, do nothing.
-        if position.Y <= 0 or position.X <= 0:
-            return
-        # Adjust for Windows' SetConsoleCursorPosition:
-        #    1. being 0-based, while ANSI is 1-based.
-        #    2. expecting (x,y), while ANSI uses (y,x).
-        adjusted_position = COORD(position.Y - 1, position.X - 1)
-        if adjust:
-            # Adjust for viewport's scroll position
-            sr = GetConsoleScreenBufferInfo(STDOUT).srWindow
-            adjusted_position.Y += sr.Top
-            adjusted_position.X += sr.Left
-        # Resume normal processing
-        handle = _GetStdHandle(stream_id)
-        return _SetConsoleCursorPosition(handle, adjusted_position)
-
-    def FillConsoleOutputCharacter(stream_id, char, length, start):
-        handle = _GetStdHandle(stream_id)
-        char = c_char(char.encode())
-        length = wintypes.DWORD(length)
-        num_written = wintypes.DWORD(0)
-        # Note that this is hard-coded for ANSI (vs wide) bytes.
-        success = _FillConsoleOutputCharacterA(
-            handle, char, length, start, byref(num_written))
-        return num_written.value
-
-    def FillConsoleOutputAttribute(stream_id, attr, length, start):
-        ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )'''
-        handle = _GetStdHandle(stream_id)
-        attribute = wintypes.WORD(attr)
-        length = wintypes.DWORD(length)
-        num_written = wintypes.DWORD(0)
-        # Note that this is hard-coded for ANSI (vs wide) bytes.
-        return _FillConsoleOutputAttribute(
-            handle, attribute, length, start, byref(num_written))
-
-    def SetConsoleTitle(title):
-        return _SetConsoleTitleW(title)
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/colorama/winterm.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/colorama/winterm.py
deleted file mode 100644
index 0fdb4ec..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/colorama/winterm.py
+++ /dev/null
@@ -1,169 +0,0 @@
-# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
-from . import win32
-
-
-# from wincon.h
-class WinColor(object):
-    BLACK   = 0
-    BLUE    = 1
-    GREEN   = 2
-    CYAN    = 3
-    RED     = 4
-    MAGENTA = 5
-    YELLOW  = 6
-    GREY    = 7
-
-# from wincon.h
-class WinStyle(object):
-    NORMAL              = 0x00 # dim text, dim background
-    BRIGHT              = 0x08 # bright text, dim background
-    BRIGHT_BACKGROUND   = 0x80 # dim text, bright background
-
-class WinTerm(object):
-
-    def __init__(self):
-        self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes
-        self.set_attrs(self._default)
-        self._default_fore = self._fore
-        self._default_back = self._back
-        self._default_style = self._style
-        # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style.
-        # So that LIGHT_EX colors and BRIGHT style do not clobber each other,
-        # we track them separately, since LIGHT_EX is overwritten by Fore/Back
-        # and BRIGHT is overwritten by Style codes.
-        self._light = 0
-
-    def get_attrs(self):
-        return self._fore + self._back * 16 + (self._style | self._light)
-
-    def set_attrs(self, value):
-        self._fore = value & 7
-        self._back = (value >> 4) & 7
-        self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND)
-
-    def reset_all(self, on_stderr=None):
-        self.set_attrs(self._default)
-        self.set_console(attrs=self._default)
-        self._light = 0
-
-    def fore(self, fore=None, light=False, on_stderr=False):
-        if fore is None:
-            fore = self._default_fore
-        self._fore = fore
-        # Emulate LIGHT_EX with BRIGHT Style
-        if light:
-            self._light |= WinStyle.BRIGHT
-        else:
-            self._light &= ~WinStyle.BRIGHT
-        self.set_console(on_stderr=on_stderr)
-
-    def back(self, back=None, light=False, on_stderr=False):
-        if back is None:
-            back = self._default_back
-        self._back = back
-        # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style
-        if light:
-            self._light |= WinStyle.BRIGHT_BACKGROUND
-        else:
-            self._light &= ~WinStyle.BRIGHT_BACKGROUND
-        self.set_console(on_stderr=on_stderr)
-
-    def style(self, style=None, on_stderr=False):
-        if style is None:
-            style = self._default_style
-        self._style = style
-        self.set_console(on_stderr=on_stderr)
-
-    def set_console(self, attrs=None, on_stderr=False):
-        if attrs is None:
-            attrs = self.get_attrs()
-        handle = win32.STDOUT
-        if on_stderr:
-            handle = win32.STDERR
-        win32.SetConsoleTextAttribute(handle, attrs)
-
-    def get_position(self, handle):
-        position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition
-        # Because Windows coordinates are 0-based,
-        # and win32.SetConsoleCursorPosition expects 1-based.
-        position.X += 1
-        position.Y += 1
-        return position
-
-    def set_cursor_position(self, position=None, on_stderr=False):
-        if position is None:
-            # I'm not currently tracking the position, so there is no default.
-            # position = self.get_position()
-            return
-        handle = win32.STDOUT
-        if on_stderr:
-            handle = win32.STDERR
-        win32.SetConsoleCursorPosition(handle, position)
-
-    def cursor_adjust(self, x, y, on_stderr=False):
-        handle = win32.STDOUT
-        if on_stderr:
-            handle = win32.STDERR
-        position = self.get_position(handle)
-        adjusted_position = (position.Y + y, position.X + x)
-        win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False)
-
-    def erase_screen(self, mode=0, on_stderr=False):
-        # 0 should clear from the cursor to the end of the screen.
-        # 1 should clear from the cursor to the beginning of the screen.
-        # 2 should clear the entire screen, and move cursor to (1,1)
-        handle = win32.STDOUT
-        if on_stderr:
-            handle = win32.STDERR
-        csbi = win32.GetConsoleScreenBufferInfo(handle)
-        # get the number of character cells in the current buffer
-        cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y
-        # get number of character cells before current cursor position
-        cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X
-        if mode == 0:
-            from_coord = csbi.dwCursorPosition
-            cells_to_erase = cells_in_screen - cells_before_cursor
-        elif mode == 1:
-            from_coord = win32.COORD(0, 0)
-            cells_to_erase = cells_before_cursor
-        elif mode == 2:
-            from_coord = win32.COORD(0, 0)
-            cells_to_erase = cells_in_screen
-        else:
-            # invalid mode
-            return
-        # fill the entire screen with blanks
-        win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
-        # now set the buffer's attributes accordingly
-        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
-        if mode == 2:
-            # put the cursor where needed
-            win32.SetConsoleCursorPosition(handle, (1, 1))
-
-    def erase_line(self, mode=0, on_stderr=False):
-        # 0 should clear from the cursor to the end of the line.
-        # 1 should clear from the cursor to the beginning of the line.
-        # 2 should clear the entire line.
-        handle = win32.STDOUT
-        if on_stderr:
-            handle = win32.STDERR
-        csbi = win32.GetConsoleScreenBufferInfo(handle)
-        if mode == 0:
-            from_coord = csbi.dwCursorPosition
-            cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X
-        elif mode == 1:
-            from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
-            cells_to_erase = csbi.dwCursorPosition.X
-        elif mode == 2:
-            from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
-            cells_to_erase = csbi.dwSize.X
-        else:
-            # invalid mode
-            return
-        # fill the selected portion of the line with blanks
-        win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
-        # now set the buffer's attributes accordingly
-        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
-
-    def set_title(self, title):
-        win32.SetConsoleTitle(title)
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/contextlib2.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/contextlib2.py
deleted file mode 100644
index 3aae8f4..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/contextlib2.py
+++ /dev/null
@@ -1,518 +0,0 @@
-"""contextlib2 - backports and enhancements to the contextlib module"""
-
-import abc
-import sys
-import warnings
-from collections import deque
-from functools import wraps
-
-__all__ = ["contextmanager", "closing", "nullcontext",
-           "AbstractContextManager",
-           "ContextDecorator", "ExitStack",
-           "redirect_stdout", "redirect_stderr", "suppress"]
-
-# Backwards compatibility
-__all__ += ["ContextStack"]
-
-
-# Backport abc.ABC
-if sys.version_info[:2] >= (3, 4):
-    _abc_ABC = abc.ABC
-else:
-    _abc_ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()})
-
-
-# Backport classic class MRO
-def _classic_mro(C, result):
-    if C in result:
-        return
-    result.append(C)
-    for B in C.__bases__:
-        _classic_mro(B, result)
-    return result
-
-
-# Backport _collections_abc._check_methods
-def _check_methods(C, *methods):
-    try:
-        mro = C.__mro__
-    except AttributeError:
-        mro = tuple(_classic_mro(C, []))
-
-    for method in methods:
-        for B in mro:
-            if method in B.__dict__:
-                if B.__dict__[method] is None:
-                    return NotImplemented
-                break
-        else:
-            return NotImplemented
-    return True
-
-
-class AbstractContextManager(_abc_ABC):
-    """An abstract base class for context managers."""
-
-    def __enter__(self):
-        """Return `self` upon entering the runtime context."""
-        return self
-
-    @abc.abstractmethod
-    def __exit__(self, exc_type, exc_value, traceback):
-        """Raise any exception triggered within the runtime context."""
-        return None
-
-    @classmethod
-    def __subclasshook__(cls, C):
-        """Check whether subclass is considered a subclass of this ABC."""
-        if cls is AbstractContextManager:
-            return _check_methods(C, "__enter__", "__exit__")
-        return NotImplemented
-
-
-class ContextDecorator(object):
-    """A base class or mixin that enables context managers to work as decorators."""
-
-    def refresh_cm(self):
-        """Returns the context manager used to actually wrap the call to the
-        decorated function.
-
-        The default implementation just returns *self*.
-
-        Overriding this method allows otherwise one-shot context managers
-        like _GeneratorContextManager to support use as decorators via
-        implicit recreation.
-
-        DEPRECATED: refresh_cm was never added to the standard library's
-                    ContextDecorator API
-        """
-        warnings.warn("refresh_cm was never added to the standard library",
-                      DeprecationWarning)
-        return self._recreate_cm()
-
-    def _recreate_cm(self):
-        """Return a recreated instance of self.
-
-        Allows an otherwise one-shot context manager like
-        _GeneratorContextManager to support use as
-        a decorator via implicit recreation.
-
-        This is a private interface just for _GeneratorContextManager.
-        See issue #11647 for details.
-        """
-        return self
-
-    def __call__(self, func):
-        @wraps(func)
-        def inner(*args, **kwds):
-            with self._recreate_cm():
-                return func(*args, **kwds)
-        return inner
-
-
-class _GeneratorContextManager(ContextDecorator):
-    """Helper for @contextmanager decorator."""
-
-    def __init__(self, func, args, kwds):
-        self.gen = func(*args, **kwds)
-        self.func, self.args, self.kwds = func, args, kwds
-        # Issue 19330: ensure context manager instances have good docstrings
-        doc = getattr(func, "__doc__", None)
-        if doc is None:
-            doc = type(self).__doc__
-        self.__doc__ = doc
-        # Unfortunately, this still doesn't provide good help output when
-        # inspecting the created context manager instances, since pydoc
-        # currently bypasses the instance docstring and shows the docstring
-        # for the class instead.
-        # See http://bugs.python.org/issue19404 for more details.
-
-    def _recreate_cm(self):
-        # _GCM instances are one-shot context managers, so the
-        # CM must be recreated each time a decorated function is
-        # called
-        return self.__class__(self.func, self.args, self.kwds)
-
-    def __enter__(self):
-        try:
-            return next(self.gen)
-        except StopIteration:
-            raise RuntimeError("generator didn't yield")
-
-    def __exit__(self, type, value, traceback):
-        if type is None:
-            try:
-                next(self.gen)
-            except StopIteration:
-                return
-            else:
-                raise RuntimeError("generator didn't stop")
-        else:
-            if value is None:
-                # Need to force instantiation so we can reliably
-                # tell if we get the same exception back
-                value = type()
-            try:
-                self.gen.throw(type, value, traceback)
-                raise RuntimeError("generator didn't stop after throw()")
-            except StopIteration as exc:
-                # Suppress StopIteration *unless* it's the same exception that
-                # was passed to throw().  This prevents a StopIteration
-                # raised inside the "with" statement from being suppressed.
-                return exc is not value
-            except RuntimeError as exc:
-                # Don't re-raise the passed in exception
-                if exc is value:
-                    return False
-                # Likewise, avoid suppressing if a StopIteration exception
-                # was passed to throw() and later wrapped into a RuntimeError
-                # (see PEP 479).
-                if _HAVE_EXCEPTION_CHAINING and exc.__cause__ is value:
-                    return False
-                raise
-            except:
-                # only re-raise if it's *not* the exception that was
-                # passed to throw(), because __exit__() must not raise
-                # an exception unless __exit__() itself failed.  But throw()
-                # has to raise the exception to signal propagation, so this
-                # fixes the impedance mismatch between the throw() protocol
-                # and the __exit__() protocol.
-                #
-                if sys.exc_info()[1] is not value:
-                    raise
-
-
-def contextmanager(func):
-    """@contextmanager decorator.
-
-    Typical usage:
-
-        @contextmanager
-        def some_generator(<arguments>):
-            <setup>
-            try:
-                yield <value>
-            finally:
-                <cleanup>
-
-    This makes this:
-
-        with some_generator(<arguments>) as <variable>:
-            <body>
-
-    equivalent to this:
-
-        <setup>
-        try:
-            <variable> = <value>
-            <body>
-        finally:
-            <cleanup>
-
-    """
-    @wraps(func)
-    def helper(*args, **kwds):
-        return _GeneratorContextManager(func, args, kwds)
-    return helper
-
-
-class closing(object):
-    """Context to automatically close something at the end of a block.
-
-    Code like this:
-
-        with closing(<module>.open(<arguments>)) as f:
-            <block>
-
-    is equivalent to this:
-
-        f = <module>.open(<arguments>)
-        try:
-            <block>
-        finally:
-            f.close()
-
-    """
-    def __init__(self, thing):
-        self.thing = thing
-
-    def __enter__(self):
-        return self.thing
-
-    def __exit__(self, *exc_info):
-        self.thing.close()
-
-
-class _RedirectStream(object):
-
-    _stream = None
-
-    def __init__(self, new_target):
-        self._new_target = new_target
-        # We use a list of old targets to make this CM re-entrant
-        self._old_targets = []
-
-    def __enter__(self):
-        self._old_targets.append(getattr(sys, self._stream))
-        setattr(sys, self._stream, self._new_target)
-        return self._new_target
-
-    def __exit__(self, exctype, excinst, exctb):
-        setattr(sys, self._stream, self._old_targets.pop())
-
-
-class redirect_stdout(_RedirectStream):
-    """Context manager for temporarily redirecting stdout to another file.
-
-        # How to send help() to stderr
-        with redirect_stdout(sys.stderr):
-            help(dir)
-
-        # How to write help() to a file
-        with open('help.txt', 'w') as f:
-            with redirect_stdout(f):
-                help(pow)
-    """
-
-    _stream = "stdout"
-
-
-class redirect_stderr(_RedirectStream):
-    """Context manager for temporarily redirecting stderr to another file."""
-
-    _stream = "stderr"
-
-
-class suppress(object):
-    """Context manager to suppress specified exceptions
-
-    After the exception is suppressed, execution proceeds with the next
-    statement following the with statement.
-
-         with suppress(FileNotFoundError):
-             os.remove(somefile)
-         # Execution still resumes here if the file was already removed
-    """
-
-    def __init__(self, *exceptions):
-        self._exceptions = exceptions
-
-    def __enter__(self):
-        pass
-
-    def __exit__(self, exctype, excinst, exctb):
-        # Unlike isinstance and issubclass, CPython exception handling
-        # currently only looks at the concrete type hierarchy (ignoring
-        # the instance and subclass checking hooks). While Guido considers
-        # that a bug rather than a feature, it's a fairly hard one to fix
-        # due to various internal implementation details. suppress provides
-        # the simpler issubclass based semantics, rather than trying to
-        # exactly reproduce the limitations of the CPython interpreter.
-        #
-        # See http://bugs.python.org/issue12029 for more details
-        return exctype is not None and issubclass(exctype, self._exceptions)
-
-
-# Context manipulation is Python 3 only
-_HAVE_EXCEPTION_CHAINING = sys.version_info[0] >= 3
-if _HAVE_EXCEPTION_CHAINING:
-    def _make_context_fixer(frame_exc):
-        def _fix_exception_context(new_exc, old_exc):
-            # Context may not be correct, so find the end of the chain
-            while 1:
-                exc_context = new_exc.__context__
-                if exc_context is old_exc:
-                    # Context is already set correctly (see issue 20317)
-                    return
-                if exc_context is None or exc_context is frame_exc:
-                    break
-                new_exc = exc_context
-            # Change the end of the chain to point to the exception
-            # we expect it to reference
-            new_exc.__context__ = old_exc
-        return _fix_exception_context
-
-    def _reraise_with_existing_context(exc_details):
-        try:
-            # bare "raise exc_details[1]" replaces our carefully
-            # set-up context
-            fixed_ctx = exc_details[1].__context__
-            raise exc_details[1]
-        except BaseException:
-            exc_details[1].__context__ = fixed_ctx
-            raise
-else:
-    # No exception context in Python 2
-    def _make_context_fixer(frame_exc):
-        return lambda new_exc, old_exc: None
-
-    # Use 3 argument raise in Python 2,
-    # but use exec to avoid SyntaxError in Python 3
-    def _reraise_with_existing_context(exc_details):
-        exc_type, exc_value, exc_tb = exc_details
-        exec("raise exc_type, exc_value, exc_tb")
-
-# Handle old-style classes if they exist
-try:
-    from types import InstanceType
-except ImportError:
-    # Python 3 doesn't have old-style classes
-    _get_type = type
-else:
-    # Need to handle old-style context managers on Python 2
-    def _get_type(obj):
-        obj_type = type(obj)
-        if obj_type is InstanceType:
-            return obj.__class__  # Old-style class
-        return obj_type  # New-style class
-
-
-# Inspired by discussions on http://bugs.python.org/issue13585
-class ExitStack(object):
-    """Context manager for dynamic management of a stack of exit callbacks
-
-    For example:
-
-        with ExitStack() as stack:
-            files = [stack.enter_context(open(fname)) for fname in filenames]
-            # All opened files will automatically be closed at the end of
-            # the with statement, even if attempts to open files later
-            # in the list raise an exception
-
-    """
-    def __init__(self):
-        self._exit_callbacks = deque()
-
-    def pop_all(self):
-        """Preserve the context stack by transferring it to a new instance"""
-        new_stack = type(self)()
-        new_stack._exit_callbacks = self._exit_callbacks
-        self._exit_callbacks = deque()
-        return new_stack
-
-    def _push_cm_exit(self, cm, cm_exit):
-        """Helper to correctly register callbacks to __exit__ methods"""
-        def _exit_wrapper(*exc_details):
-            return cm_exit(cm, *exc_details)
-        _exit_wrapper.__self__ = cm
-        self.push(_exit_wrapper)
-
-    def push(self, exit):
-        """Registers a callback with the standard __exit__ method signature
-
-        Can suppress exceptions the same way __exit__ methods can.
-
-        Also accepts any object with an __exit__ method (registering a call
-        to the method instead of the object itself)
-        """
-        # We use an unbound method rather than a bound method to follow
-        # the standard lookup behaviour for special methods
-        _cb_type = _get_type(exit)
-        try:
-            exit_method = _cb_type.__exit__
-        except AttributeError:
-            # Not a context manager, so assume it's a callable
-            self._exit_callbacks.append(exit)
-        else:
-            self._push_cm_exit(exit, exit_method)
-        return exit # Allow use as a decorator
-
-    def callback(self, callback, *args, **kwds):
-        """Registers an arbitrary callback and arguments.
-
-        Cannot suppress exceptions.
-        """
-        def _exit_wrapper(exc_type, exc, tb):
-            callback(*args, **kwds)
-        # We changed the signature, so using @wraps is not appropriate, but
-        # setting __wrapped__ may still help with introspection
-        _exit_wrapper.__wrapped__ = callback
-        self.push(_exit_wrapper)
-        return callback # Allow use as a decorator
-
-    def enter_context(self, cm):
-        """Enters the supplied context manager
-
-        If successful, also pushes its __exit__ method as a callback and
-        returns the result of the __enter__ method.
-        """
-        # We look up the special methods on the type to match the with statement
-        _cm_type = _get_type(cm)
-        _exit = _cm_type.__exit__
-        result = _cm_type.__enter__(cm)
-        self._push_cm_exit(cm, _exit)
-        return result
-
-    def close(self):
-        """Immediately unwind the context stack"""
-        self.__exit__(None, None, None)
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, *exc_details):
-        received_exc = exc_details[0] is not None
-
-        # We manipulate the exception state so it behaves as though
-        # we were actually nesting multiple with statements
-        frame_exc = sys.exc_info()[1]
-        _fix_exception_context = _make_context_fixer(frame_exc)
-
-        # Callbacks are invoked in LIFO order to match the behaviour of
-        # nested context managers
-        suppressed_exc = False
-        pending_raise = False
-        while self._exit_callbacks:
-            cb = self._exit_callbacks.pop()
-            try:
-                if cb(*exc_details):
-                    suppressed_exc = True
-                    pending_raise = False
-                    exc_details = (None, None, None)
-            except:
-                new_exc_details = sys.exc_info()
-                # simulate the stack of exceptions by setting the context
-                _fix_exception_context(new_exc_details[1], exc_details[1])
-                pending_raise = True
-                exc_details = new_exc_details
-        if pending_raise:
-            _reraise_with_existing_context(exc_details)
-        return received_exc and suppressed_exc
-
-
-# Preserve backwards compatibility
-class ContextStack(ExitStack):
-    """Backwards compatibility alias for ExitStack"""
-
-    def __init__(self):
-        warnings.warn("ContextStack has been renamed to ExitStack",
-                      DeprecationWarning)
-        super(ContextStack, self).__init__()
-
-    def register_exit(self, callback):
-        return self.push(callback)
-
-    def register(self, callback, *args, **kwds):
-        return self.callback(callback, *args, **kwds)
-
-    def preserve(self):
-        return self.pop_all()
-
-
-class nullcontext(AbstractContextManager):
-    """Context manager that does no additional processing.
-    Used as a stand-in for a normal context manager, when a particular
-    block of code is only sometimes used with a normal context manager:
-    cm = optional_cm if condition else nullcontext()
-    with cm:
-        # Perform operation, using optional_cm if condition is True
-    """
-
-    def __init__(self, enter_result=None):
-        self.enter_result = enter_result
-
-    def __enter__(self):
-        return self.enter_result
-
-    def __exit__(self, *excinfo):
-        pass
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/__init__.py
deleted file mode 100644
index a2d70d4..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/__init__.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2012-2019 Vinay Sajip.
-# Licensed to the Python Software Foundation under a contributor agreement.
-# See LICENSE.txt and CONTRIBUTORS.txt.
-#
-import logging
-
-__version__ = '0.2.9.post0'
-
-class DistlibException(Exception):
-    pass
-
-try:
-    from logging import NullHandler
-except ImportError: # pragma: no cover
-    class NullHandler(logging.Handler):
-        def handle(self, record): pass
-        def emit(self, record): pass
-        def createLock(self): self.lock = None
-
-logger = logging.getLogger(__name__)
-logger.addHandler(NullHandler())
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__init__.py
deleted file mode 100644
index f7dbf4c..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-"""Modules copied from Python 3 standard libraries, for internal use only.
-
-Individual classes and functions are found in d2._backport.misc.  Intended
-usage is to always import things missing from 3.1 from that module: the
-built-in/stdlib objects will be used if found.
-"""
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/misc.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/misc.py
deleted file mode 100644
index cfb318d..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/misc.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2012 The Python Software Foundation.
-# See LICENSE.txt and CONTRIBUTORS.txt.
-#
-"""Backports for individual classes and functions."""
-
-import os
-import sys
-
-__all__ = ['cache_from_source', 'callable', 'fsencode']
-
-
-try:
-    from imp import cache_from_source
-except ImportError:
-    def cache_from_source(py_file, debug=__debug__):
-        ext = debug and 'c' or 'o'
-        return py_file + ext
-
-
-try:
-    callable = callable
-except NameError:
-    from collections import Callable
-
-    def callable(obj):
-        return isinstance(obj, Callable)
-
-
-try:
-    fsencode = os.fsencode
-except AttributeError:
-    def fsencode(filename):
-        if isinstance(filename, bytes):
-            return filename
-        elif isinstance(filename, str):
-            return filename.encode(sys.getfilesystemencoding())
-        else:
-            raise TypeError("expect bytes or str, not %s" %
-                            type(filename).__name__)
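The shims above let the rest of the backport call cache_from_source, callable and fsencode uniformly on old interpreters; on Python 3 they simply resolve to the stdlib objects. A tiny illustration of the fsencode behaviour the fallback mirrors:

    import os

    # os.fsencode is what the try/except above resolves to on Python 3.
    assert os.fsencode("setup.py") == b"setup.py"
    assert os.fsencode(b"setup.py") == b"setup.py"   # bytes pass through unchanged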
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/shutil.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/shutil.py
deleted file mode 100644
index 159e49e..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/shutil.py
+++ /dev/null
@@ -1,761 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2012 The Python Software Foundation.
-# See LICENSE.txt and CONTRIBUTORS.txt.
-#
-"""Utility functions for copying and archiving files and directory trees.
-
-XXX The functions here don't copy the resource fork or other metadata on Mac.
-
-"""
-
-import os
-import sys
-import stat
-from os.path import abspath
-import fnmatch
-import collections
-import errno
-from . import tarfile
-
-try:
-    import bz2
-    _BZ2_SUPPORTED = True
-except ImportError:
-    _BZ2_SUPPORTED = False
-
-try:
-    from pwd import getpwnam
-except ImportError:
-    getpwnam = None
-
-try:
-    from grp import getgrnam
-except ImportError:
-    getgrnam = None
-
-__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
-           "copytree", "move", "rmtree", "Error", "SpecialFileError",
-           "ExecError", "make_archive", "get_archive_formats",
-           "register_archive_format", "unregister_archive_format",
-           "get_unpack_formats", "register_unpack_format",
-           "unregister_unpack_format", "unpack_archive", "ignore_patterns"]
-
-class Error(EnvironmentError):
-    pass
-
-class SpecialFileError(EnvironmentError):
-    """Raised when trying to do a kind of operation (e.g. copying) which is
-    not supported on a special file (e.g. a named pipe)"""
-
-class ExecError(EnvironmentError):
-    """Raised when a command could not be executed"""
-
-class ReadError(EnvironmentError):
-    """Raised when an archive cannot be read"""
-
-class RegistryError(Exception):
-    """Raised when a registry operation with the archiving
-    and unpacking registries fails"""
-
-
-try:
-    WindowsError
-except NameError:
-    WindowsError = None
-
-def copyfileobj(fsrc, fdst, length=16*1024):
-    """copy data from file-like object fsrc to file-like object fdst"""
-    while 1:
-        buf = fsrc.read(length)
-        if not buf:
-            break
-        fdst.write(buf)
-
-def _samefile(src, dst):
-    # Macintosh, Unix.
-    if hasattr(os.path, 'samefile'):
-        try:
-            return os.path.samefile(src, dst)
-        except OSError:
-            return False
-
-    # All other platforms: check for same pathname.
-    return (os.path.normcase(os.path.abspath(src)) ==
-            os.path.normcase(os.path.abspath(dst)))
-
-def copyfile(src, dst):
-    """Copy data from src to dst"""
-    if _samefile(src, dst):
-        raise Error("`%s` and `%s` are the same file" % (src, dst))
-
-    for fn in [src, dst]:
-        try:
-            st = os.stat(fn)
-        except OSError:
-            # File most likely does not exist
-            pass
-        else:
-            # XXX What about other special files? (sockets, devices...)
-            if stat.S_ISFIFO(st.st_mode):
-                raise SpecialFileError("`%s` is a named pipe" % fn)
-
-    with open(src, 'rb') as fsrc:
-        with open(dst, 'wb') as fdst:
-            copyfileobj(fsrc, fdst)
-
-def copymode(src, dst):
-    """Copy mode bits from src to dst"""
-    if hasattr(os, 'chmod'):
-        st = os.stat(src)
-        mode = stat.S_IMODE(st.st_mode)
-        os.chmod(dst, mode)
-
-def copystat(src, dst):
-    """Copy all stat info (mode bits, atime, mtime, flags) from src to dst"""
-    st = os.stat(src)
-    mode = stat.S_IMODE(st.st_mode)
-    if hasattr(os, 'utime'):
-        os.utime(dst, (st.st_atime, st.st_mtime))
-    if hasattr(os, 'chmod'):
-        os.chmod(dst, mode)
-    if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
-        try:
-            os.chflags(dst, st.st_flags)
-        except OSError as why:
-            if (not hasattr(errno, 'EOPNOTSUPP') or
-                why.errno != errno.EOPNOTSUPP):
-                raise
-
-def copy(src, dst):
-    """Copy data and mode bits ("cp src dst").
-
-    The destination may be a directory.
-
-    """
-    if os.path.isdir(dst):
-        dst = os.path.join(dst, os.path.basename(src))
-    copyfile(src, dst)
-    copymode(src, dst)
-
-def copy2(src, dst):
-    """Copy data and all stat info ("cp -p src dst").
-
-    The destination may be a directory.
-
-    """
-    if os.path.isdir(dst):
-        dst = os.path.join(dst, os.path.basename(src))
-    copyfile(src, dst)
-    copystat(src, dst)
-
-def ignore_patterns(*patterns):
-    """Function that can be used as copytree() ignore parameter.
-
-    Patterns is a sequence of glob-style patterns
-    that are used to exclude files"""
-    def _ignore_patterns(path, names):
-        ignored_names = []
-        for pattern in patterns:
-            ignored_names.extend(fnmatch.filter(names, pattern))
-        return set(ignored_names)
-    return _ignore_patterns
-
-def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
-             ignore_dangling_symlinks=False):
-    """Recursively copy a directory tree.
-
-    The destination directory must not already exist.
-    If exception(s) occur, an Error is raised with a list of reasons.
-
-    If the optional symlinks flag is true, symbolic links in the
-    source tree result in symbolic links in the destination tree; if
-    it is false, the contents of the files pointed to by symbolic
-    links are copied. If the file pointed by the symlink doesn't
-    exist, an exception will be added in the list of errors raised in
-    an Error exception at the end of the copy process.
-
-    You can set the optional ignore_dangling_symlinks flag to true if you
-    want to silence this exception. Notice that this has no effect on
-    platforms that don't support os.symlink.
-
-    The optional ignore argument is a callable. If given, it
-    is called with the `src` parameter, which is the directory
-    being visited by copytree(), and `names` which is the list of
-    `src` contents, as returned by os.listdir():
-
-        callable(src, names) -> ignored_names
-
-    Since copytree() is called recursively, the callable will be
-    called once for each directory that is copied. It returns a
-    list of names relative to the `src` directory that should
-    not be copied.
-
-    The optional copy_function argument is a callable that will be used
-    to copy each file. It will be called with the source path and the
-    destination path as arguments. By default, copy2() is used, but any
-    function that supports the same signature (like copy()) can be used.
-
-    """
-    names = os.listdir(src)
-    if ignore is not None:
-        ignored_names = ignore(src, names)
-    else:
-        ignored_names = set()
-
-    os.makedirs(dst)
-    errors = []
-    for name in names:
-        if name in ignored_names:
-            continue
-        srcname = os.path.join(src, name)
-        dstname = os.path.join(dst, name)
-        try:
-            if os.path.islink(srcname):
-                linkto = os.readlink(srcname)
-                if symlinks:
-                    os.symlink(linkto, dstname)
-                else:
-                    # ignore dangling symlink if the flag is on
-                    if not os.path.exists(linkto) and ignore_dangling_symlinks:
-                        continue
-                    # otherwise let the copy occurs. copy2 will raise an error
-                    copy_function(srcname, dstname)
-            elif os.path.isdir(srcname):
-                copytree(srcname, dstname, symlinks, ignore, copy_function)
-            else:
-                # Will raise a SpecialFileError for unsupported file types
-                copy_function(srcname, dstname)
-        # catch the Error from the recursive copytree so that we can
-        # continue with other files
-        except Error as err:
-            errors.extend(err.args[0])
-        except EnvironmentError as why:
-            errors.append((srcname, dstname, str(why)))
-    try:
-        copystat(src, dst)
-    except OSError as why:
-        if WindowsError is not None and isinstance(why, WindowsError):
-            # Copying file access times may fail on Windows
-            pass
-        else:
-            errors.extend((src, dst, str(why)))
-    if errors:
-        raise Error(errors)
-
-def rmtree(path, ignore_errors=False, onerror=None):
-    """Recursively delete a directory tree.
-
-    If ignore_errors is set, errors are ignored; otherwise, if onerror
-    is set, it is called to handle the error with arguments (func,
-    path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
-    path is the argument to that function that caused it to fail; and
-    exc_info is a tuple returned by sys.exc_info().  If ignore_errors
-    is false and onerror is None, an exception is raised.
-
-    """
-    if ignore_errors:
-        def onerror(*args):
-            pass
-    elif onerror is None:
-        def onerror(*args):
-            raise
-    try:
-        if os.path.islink(path):
-            # symlinks to directories are forbidden, see bug #1669
-            raise OSError("Cannot call rmtree on a symbolic link")
-    except OSError:
-        onerror(os.path.islink, path, sys.exc_info())
-        # can't continue even if onerror hook returns
-        return
-    names = []
-    try:
-        names = os.listdir(path)
-    except os.error:
-        onerror(os.listdir, path, sys.exc_info())
-    for name in names:
-        fullname = os.path.join(path, name)
-        try:
-            mode = os.lstat(fullname).st_mode
-        except os.error:
-            mode = 0
-        if stat.S_ISDIR(mode):
-            rmtree(fullname, ignore_errors, onerror)
-        else:
-            try:
-                os.remove(fullname)
-            except os.error:
-                onerror(os.remove, fullname, sys.exc_info())
-    try:
-        os.rmdir(path)
-    except os.error:
-        onerror(os.rmdir, path, sys.exc_info())
-
-
-def _basename(path):
-    # A basename() variant which first strips the trailing slash, if present.
-    # Thus we always get the last component of the path, even for directories.
-    return os.path.basename(path.rstrip(os.path.sep))
-
-def move(src, dst):
-    """Recursively move a file or directory to another location. This is
-    similar to the Unix "mv" command.
-
-    If the destination is a directory or a symlink to a directory, the source
-    is moved inside the directory. The destination path must not already
-    exist.
-
-    If the destination already exists but is not a directory, it may be
-    overwritten depending on os.rename() semantics.
-
-    If the destination is on our current filesystem, then rename() is used.
-    Otherwise, src is copied to the destination and then removed.
-    A lot more could be done here...  A look at a mv.c shows a lot of
-    the issues this implementation glosses over.
-
-    """
-    real_dst = dst
-    if os.path.isdir(dst):
-        if _samefile(src, dst):
-            # We might be on a case insensitive filesystem,
-            # perform the rename anyway.
-            os.rename(src, dst)
-            return
-
-        real_dst = os.path.join(dst, _basename(src))
-        if os.path.exists(real_dst):
-            raise Error("Destination path '%s' already exists" % real_dst)
-    try:
-        os.rename(src, real_dst)
-    except OSError:
-        if os.path.isdir(src):
-            if _destinsrc(src, dst):
-                raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst))
-            copytree(src, real_dst, symlinks=True)
-            rmtree(src)
-        else:
-            copy2(src, real_dst)
-            os.unlink(src)
-
-def _destinsrc(src, dst):
-    src = abspath(src)
-    dst = abspath(dst)
-    if not src.endswith(os.path.sep):
-        src += os.path.sep
-    if not dst.endswith(os.path.sep):
-        dst += os.path.sep
-    return dst.startswith(src)
-
-def _get_gid(name):
-    """Returns a gid, given a group name."""
-    if getgrnam is None or name is None:
-        return None
-    try:
-        result = getgrnam(name)
-    except KeyError:
-        result = None
-    if result is not None:
-        return result[2]
-    return None
-
-def _get_uid(name):
-    """Returns an uid, given a user name."""
-    if getpwnam is None or name is None:
-        return None
-    try:
-        result = getpwnam(name)
-    except KeyError:
-        result = None
-    if result is not None:
-        return result[2]
-    return None
-
-def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
-                  owner=None, group=None, logger=None):
-    """Create a (possibly compressed) tar file from all the files under
-    'base_dir'.
-
-    'compress' must be "gzip" (the default), "bzip2", or None.
-
-    'owner' and 'group' can be used to define an owner and a group for the
-    archive that is being built. If not provided, the current owner and group
-    will be used.
-
-    The output tar file will be named 'base_name' +  ".tar", possibly plus
-    the appropriate compression extension (".gz", or ".bz2").
-
-    Returns the output filename.
-    """
-    tar_compression = {'gzip': 'gz', None: ''}
-    compress_ext = {'gzip': '.gz'}
-
-    if _BZ2_SUPPORTED:
-        tar_compression['bzip2'] = 'bz2'
-        compress_ext['bzip2'] = '.bz2'
-
-    # flags for compression program, each element of list will be an argument
-    if compress is not None and compress not in compress_ext:
-        raise ValueError("bad value for 'compress', or compression format not "
-                         "supported : {0}".format(compress))
-
-    archive_name = base_name + '.tar' + compress_ext.get(compress, '')
-    archive_dir = os.path.dirname(archive_name)
-
-    if not os.path.exists(archive_dir):
-        if logger is not None:
-            logger.info("creating %s", archive_dir)
-        if not dry_run:
-            os.makedirs(archive_dir)
-
-    # creating the tarball
-    if logger is not None:
-        logger.info('Creating tar archive')
-
-    uid = _get_uid(owner)
-    gid = _get_gid(group)
-
-    def _set_uid_gid(tarinfo):
-        if gid is not None:
-            tarinfo.gid = gid
-            tarinfo.gname = group
-        if uid is not None:
-            tarinfo.uid = uid
-            tarinfo.uname = owner
-        return tarinfo
-
-    if not dry_run:
-        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress])
-        try:
-            tar.add(base_dir, filter=_set_uid_gid)
-        finally:
-            tar.close()
-
-    return archive_name
-
-def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False):
-    # XXX see if we want to keep an external call here
-    if verbose:
-        zipoptions = "-r"
-    else:
-        zipoptions = "-rq"
-    from distutils.errors import DistutilsExecError
-    from distutils.spawn import spawn
-    try:
-        spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run)
-    except DistutilsExecError:
-        # XXX really should distinguish between "couldn't find
-        # external 'zip' command" and "zip failed".
-        raise ExecError("unable to create zip file '%s': "
-            "could neither import the 'zipfile' module nor "
-            "find a standalone zip utility" % zip_filename)
-
-def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None):
-    """Create a zip file from all the files under 'base_dir'.
-
-    The output zip file will be named 'base_name' + ".zip".  Uses either the
-    "zipfile" Python module (if available) or the InfoZIP "zip" utility
-    (if installed and found on the default search path).  If neither tool is
-    available, raises ExecError.  Returns the name of the output zip
-    file.
-    """
-    zip_filename = base_name + ".zip"
-    archive_dir = os.path.dirname(base_name)
-
-    if not os.path.exists(archive_dir):
-        if logger is not None:
-            logger.info("creating %s", archive_dir)
-        if not dry_run:
-            os.makedirs(archive_dir)
-
-    # If zipfile module is not available, try spawning an external 'zip'
-    # command.
-    try:
-        import zipfile
-    except ImportError:
-        zipfile = None
-
-    if zipfile is None:
-        _call_external_zip(base_dir, zip_filename, verbose, dry_run)
-    else:
-        if logger is not None:
-            logger.info("creating '%s' and adding '%s' to it",
-                        zip_filename, base_dir)
-
-        if not dry_run:
-            zip = zipfile.ZipFile(zip_filename, "w",
-                                  compression=zipfile.ZIP_DEFLATED)
-
-            for dirpath, dirnames, filenames in os.walk(base_dir):
-                for name in filenames:
-                    path = os.path.normpath(os.path.join(dirpath, name))
-                    if os.path.isfile(path):
-                        zip.write(path, path)
-                        if logger is not None:
-                            logger.info("adding '%s'", path)
-            zip.close()
-
-    return zip_filename
-
-_ARCHIVE_FORMATS = {
-    'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
-    'bztar': (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
-    'tar':   (_make_tarball, [('compress', None)], "uncompressed tar file"),
-    'zip':   (_make_zipfile, [], "ZIP file"),
-    }
-
-if _BZ2_SUPPORTED:
-    _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')],
-                                "bzip2'ed tar-file")
-
-def get_archive_formats():
-    """Returns a list of supported formats for archiving and unarchiving.
-
-    Each element of the returned sequence is a tuple (name, description)
-    """
-    formats = [(name, registry[2]) for name, registry in
-               _ARCHIVE_FORMATS.items()]
-    formats.sort()
-    return formats
-
-def register_archive_format(name, function, extra_args=None, description=''):
-    """Registers an archive format.
-
-    name is the name of the format. function is the callable that will be
-    used to create archives. If provided, extra_args is a sequence of
-    (name, value) tuples that will be passed as arguments to the callable.
-    description can be provided to describe the format, and will be returned
-    by the get_archive_formats() function.
-    """
-    if extra_args is None:
-        extra_args = []
-    if not isinstance(function, collections.Callable):
-        raise TypeError('The %s object is not callable' % function)
-    if not isinstance(extra_args, (tuple, list)):
-        raise TypeError('extra_args needs to be a sequence')
-    for element in extra_args:
-        if not isinstance(element, (tuple, list)) or len(element) !=2:
-            raise TypeError('extra_args elements are : (arg_name, value)')
-
-    _ARCHIVE_FORMATS[name] = (function, extra_args, description)
-
-def unregister_archive_format(name):
-    del _ARCHIVE_FORMATS[name]
-
-def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
-                 dry_run=0, owner=None, group=None, logger=None):
-    """Create an archive file (eg. zip or tar).
-
-    'base_name' is the name of the file to create, minus any format-specific
-    extension; 'format' is the archive format: one of "zip", "tar", "bztar"
-    or "gztar".
-
-    'root_dir' is a directory that will be the root directory of the
-    archive; ie. we typically chdir into 'root_dir' before creating the
-    archive.  'base_dir' is the directory where we start archiving from;
-    ie. 'base_dir' will be the common prefix of all files and
-    directories in the archive.  'root_dir' and 'base_dir' both default
-    to the current directory.  Returns the name of the archive file.
-
-    'owner' and 'group' are used when creating a tar archive. By default,
-    uses the current owner and group.
-    """
-    save_cwd = os.getcwd()
-    if root_dir is not None:
-        if logger is not None:
-            logger.debug("changing into '%s'", root_dir)
-        base_name = os.path.abspath(base_name)
-        if not dry_run:
-            os.chdir(root_dir)
-
-    if base_dir is None:
-        base_dir = os.curdir
-
-    kwargs = {'dry_run': dry_run, 'logger': logger}
-
-    try:
-        format_info = _ARCHIVE_FORMATS[format]
-    except KeyError:
-        raise ValueError("unknown archive format '%s'" % format)
-
-    func = format_info[0]
-    for arg, val in format_info[1]:
-        kwargs[arg] = val
-
-    if format != 'zip':
-        kwargs['owner'] = owner
-        kwargs['group'] = group
-
-    try:
-        filename = func(base_name, base_dir, **kwargs)
-    finally:
-        if root_dir is not None:
-            if logger is not None:
-                logger.debug("changing back to '%s'", save_cwd)
-            os.chdir(save_cwd)
-
-    return filename
-
-
-def get_unpack_formats():
-    """Returns a list of supported formats for unpacking.
-
-    Each element of the returned sequence is a tuple
-    (name, extensions, description)
-    """
-    formats = [(name, info[0], info[3]) for name, info in
-               _UNPACK_FORMATS.items()]
-    formats.sort()
-    return formats
-
-def _check_unpack_options(extensions, function, extra_args):
-    """Checks what gets registered as an unpacker."""
-    # first make sure no other unpacker is registered for this extension
-    existing_extensions = {}
-    for name, info in _UNPACK_FORMATS.items():
-        for ext in info[0]:
-            existing_extensions[ext] = name
-
-    for extension in extensions:
-        if extension in existing_extensions:
-            msg = '%s is already registered for "%s"'
-            raise RegistryError(msg % (extension,
-                                       existing_extensions[extension]))
-
-    if not isinstance(function, collections.Callable):
-        raise TypeError('The registered function must be a callable')
-
-
-def register_unpack_format(name, extensions, function, extra_args=None,
-                           description=''):
-    """Registers an unpack format.
-
-    `name` is the name of the format. `extensions` is a list of extensions
-    corresponding to the format.
-
-    `function` is the callable that will be
-    used to unpack archives. The callable will receive archives to unpack.
-    If it's unable to handle an archive, it needs to raise a ReadError
-    exception.
-
-    If provided, `extra_args` is a sequence of
-    (name, value) tuples that will be passed as arguments to the callable.
-    description can be provided to describe the format, and will be returned
-    by the get_unpack_formats() function.
-    """
-    if extra_args is None:
-        extra_args = []
-    _check_unpack_options(extensions, function, extra_args)
-    _UNPACK_FORMATS[name] = extensions, function, extra_args, description
-
-def unregister_unpack_format(name):
-    """Removes the pack format from the registry."""
-    del _UNPACK_FORMATS[name]
-
-def _ensure_directory(path):
-    """Ensure that the parent directory of `path` exists"""
-    dirname = os.path.dirname(path)
-    if not os.path.isdir(dirname):
-        os.makedirs(dirname)
-
-def _unpack_zipfile(filename, extract_dir):
-    """Unpack zip `filename` to `extract_dir`
-    """
-    try:
-        import zipfile
-    except ImportError:
-        raise ReadError('zlib not supported, cannot unpack this archive.')
-
-    if not zipfile.is_zipfile(filename):
-        raise ReadError("%s is not a zip file" % filename)
-
-    zip = zipfile.ZipFile(filename)
-    try:
-        for info in zip.infolist():
-            name = info.filename
-
-            # don't extract absolute paths or ones with .. in them
-            if name.startswith('/') or '..' in name:
-                continue
-
-            target = os.path.join(extract_dir, *name.split('/'))
-            if not target:
-                continue
-
-            _ensure_directory(target)
-            if not name.endswith('/'):
-                # file
-                data = zip.read(info.filename)
-                f = open(target, 'wb')
-                try:
-                    f.write(data)
-                finally:
-                    f.close()
-                    del data
-    finally:
-        zip.close()
-
-def _unpack_tarfile(filename, extract_dir):
-    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
-    """
-    try:
-        tarobj = tarfile.open(filename)
-    except tarfile.TarError:
-        raise ReadError(
-            "%s is not a compressed or uncompressed tar file" % filename)
-    try:
-        tarobj.extractall(extract_dir)
-    finally:
-        tarobj.close()
-
-_UNPACK_FORMATS = {
-    'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"),
-    'tar':   (['.tar'], _unpack_tarfile, [], "uncompressed tar file"),
-    'zip':   (['.zip'], _unpack_zipfile, [], "ZIP file")
-    }
-
-if _BZ2_SUPPORTED:
-    _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [],
-                                "bzip2'ed tar-file")
-
-def _find_unpack_format(filename):
-    for name, info in _UNPACK_FORMATS.items():
-        for extension in info[0]:
-            if filename.endswith(extension):
-                return name
-    return None
-
-def unpack_archive(filename, extract_dir=None, format=None):
-    """Unpack an archive.
-
-    `filename` is the name of the archive.
-
-    `extract_dir` is the name of the target directory, where the archive
-    is unpacked. If not provided, the current working directory is used.
-
-    `format` is the archive format: one of "zip", "tar", or "gztar". Or any
-    other registered format. If not provided, unpack_archive will use the
-    filename extension and see if an unpacker was registered for that
-    extension.
-
-    In case none is found, a ValueError is raised.
-    """
-    if extract_dir is None:
-        extract_dir = os.getcwd()
-
-    if format is not None:
-        try:
-            format_info = _UNPACK_FORMATS[format]
-        except KeyError:
-            raise ValueError("Unknown unpack format '{0}'".format(format))
-
-        func = format_info[1]
-        func(filename, extract_dir, **dict(format_info[2]))
-    else:
-        # we need to look at the registered unpackers supported extensions
-        format = _find_unpack_format(filename)
-        if format is None:
-            raise ReadError("Unknown archive format '{0}'".format(filename))
-
-        func = _UNPACK_FORMATS[format][1]
-        kwargs = dict(_UNPACK_FORMATS[format][2])
-        func(filename, extract_dir, **kwargs)
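The backported shutil above re-implements the archiving registry (make_archive, unpack_archive, register_archive_format and friends). A hedged round-trip sketch using the modern stdlib equivalents of those two entry points; the temporary-directory layout and file names are illustrative:

    import os
    import shutil
    import tempfile

    src = tempfile.mkdtemp()
    with open(os.path.join(src, "hello.txt"), "w") as f:
        f.write("hello\n")

    out = tempfile.mkdtemp()
    # Returns the archive path, e.g. <out>/demo.tar.gz
    archive = shutil.make_archive(os.path.join(out, "demo"), "gztar", root_dir=src)

    dest = tempfile.mkdtemp()
    shutil.unpack_archive(archive, dest)   # format inferred from the .tar.gz suffix
    print(sorted(os.listdir(dest)))        # ['hello.txt']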
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg
deleted file mode 100644
index 1746bd0..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg
+++ /dev/null
@@ -1,84 +0,0 @@
-[posix_prefix]
-# Configuration directories.  Some of these come straight out of the
-# configure script.  They are for implementing the other variables, not to
-# be used directly in [resource_locations].
-confdir = /etc
-datadir = /usr/share
-libdir = /usr/lib
-statedir = /var
-# User resource directory
-local = ~/.local/{distribution.name}
-
-stdlib = {base}/lib/python{py_version_short}
-platstdlib = {platbase}/lib/python{py_version_short}
-purelib = {base}/lib/python{py_version_short}/site-packages
-platlib = {platbase}/lib/python{py_version_short}/site-packages
-include = {base}/include/python{py_version_short}{abiflags}
-platinclude = {platbase}/include/python{py_version_short}{abiflags}
-data = {base}
-
-[posix_home]
-stdlib = {base}/lib/python
-platstdlib = {base}/lib/python
-purelib = {base}/lib/python
-platlib = {base}/lib/python
-include = {base}/include/python
-platinclude = {base}/include/python
-scripts = {base}/bin
-data = {base}
-
-[nt]
-stdlib = {base}/Lib
-platstdlib = {base}/Lib
-purelib = {base}/Lib/site-packages
-platlib = {base}/Lib/site-packages
-include = {base}/Include
-platinclude = {base}/Include
-scripts = {base}/Scripts
-data = {base}
-
-[os2]
-stdlib = {base}/Lib
-platstdlib = {base}/Lib
-purelib = {base}/Lib/site-packages
-platlib = {base}/Lib/site-packages
-include = {base}/Include
-platinclude = {base}/Include
-scripts = {base}/Scripts
-data = {base}
-
-[os2_home]
-stdlib = {userbase}/lib/python{py_version_short}
-platstdlib = {userbase}/lib/python{py_version_short}
-purelib = {userbase}/lib/python{py_version_short}/site-packages
-platlib = {userbase}/lib/python{py_version_short}/site-packages
-include = {userbase}/include/python{py_version_short}
-scripts = {userbase}/bin
-data = {userbase}
-
-[nt_user]
-stdlib = {userbase}/Python{py_version_nodot}
-platstdlib = {userbase}/Python{py_version_nodot}
-purelib = {userbase}/Python{py_version_nodot}/site-packages
-platlib = {userbase}/Python{py_version_nodot}/site-packages
-include = {userbase}/Python{py_version_nodot}/Include
-scripts = {userbase}/Scripts
-data = {userbase}
-
-[posix_user]
-stdlib = {userbase}/lib/python{py_version_short}
-platstdlib = {userbase}/lib/python{py_version_short}
-purelib = {userbase}/lib/python{py_version_short}/site-packages
-platlib = {userbase}/lib/python{py_version_short}/site-packages
-include = {userbase}/include/python{py_version_short}
-scripts = {userbase}/bin
-data = {userbase}
-
-[osx_framework_user]
-stdlib = {userbase}/lib/python
-platstdlib = {userbase}/lib/python
-purelib = {userbase}/lib/python/site-packages
-platlib = {userbase}/lib/python/site-packages
-include = {userbase}/include
-scripts = {userbase}/bin
-data = {userbase}
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.py
deleted file mode 100644
index 1df3aba..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/sysconfig.py
+++ /dev/null
@@ -1,788 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2012 The Python Software Foundation.
-# See LICENSE.txt and CONTRIBUTORS.txt.
-#
-"""Access to Python's configuration information."""
-
-import codecs
-import os
-import re
-import sys
-from os.path import pardir, realpath
-try:
-    import configparser
-except ImportError:
-    import ConfigParser as configparser
-
-
-__all__ = [
-    'get_config_h_filename',
-    'get_config_var',
-    'get_config_vars',
-    'get_makefile_filename',
-    'get_path',
-    'get_path_names',
-    'get_paths',
-    'get_platform',
-    'get_python_version',
-    'get_scheme_names',
-    'parse_config_h',
-]
-
-
-def _safe_realpath(path):
-    try:
-        return realpath(path)
-    except OSError:
-        return path
-
-
-if sys.executable:
-    _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable))
-else:
-    # sys.executable can be empty if argv[0] has been changed and Python is
-    # unable to retrieve the real program name
-    _PROJECT_BASE = _safe_realpath(os.getcwd())
-
-if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower():
-    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir))
-# PC/VS7.1
-if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower():
-    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
-# PC/AMD64
-if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower():
-    _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
-
-
-def is_python_build():
-    for fn in ("Setup.dist", "Setup.local"):
-        if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)):
-            return True
-    return False
-
-_PYTHON_BUILD = is_python_build()
-
-_cfg_read = False
-
-def _ensure_cfg_read():
-    global _cfg_read
-    if not _cfg_read:
-        from ..resources import finder
-        backport_package = __name__.rsplit('.', 1)[0]
-        _finder = finder(backport_package)
-        _cfgfile = _finder.find('sysconfig.cfg')
-        assert _cfgfile, 'sysconfig.cfg exists'
-        with _cfgfile.as_stream() as s:
-            _SCHEMES.readfp(s)
-        if _PYTHON_BUILD:
-            for scheme in ('posix_prefix', 'posix_home'):
-                _SCHEMES.set(scheme, 'include', '{srcdir}/Include')
-                _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.')
-
-        _cfg_read = True
-
-
-_SCHEMES = configparser.RawConfigParser()
-_VAR_REPL = re.compile(r'\{([^{]*?)\}')
-
-def _expand_globals(config):
-    _ensure_cfg_read()
-    if config.has_section('globals'):
-        globals = config.items('globals')
-    else:
-        globals = tuple()
-
-    sections = config.sections()
-    for section in sections:
-        if section == 'globals':
-            continue
-        for option, value in globals:
-            if config.has_option(section, option):
-                continue
-            config.set(section, option, value)
-    config.remove_section('globals')
-
-    # now expanding local variables defined in the cfg file
-    #
-    for section in config.sections():
-        variables = dict(config.items(section))
-
-        def _replacer(matchobj):
-            name = matchobj.group(1)
-            if name in variables:
-                return variables[name]
-            return matchobj.group(0)
-
-        for option, value in config.items(section):
-            config.set(section, option, _VAR_REPL.sub(_replacer, value))
-
-#_expand_globals(_SCHEMES)
-
- # FIXME don't rely on sys.version here, its format is an implementation detail
- # of CPython, use sys.version_info or sys.hexversion
-_PY_VERSION = sys.version.split()[0]
-_PY_VERSION_SHORT = sys.version[:3]
-_PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2]
-_PREFIX = os.path.normpath(sys.prefix)
-_EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
-_CONFIG_VARS = None
-_USER_BASE = None
-
-
-def _subst_vars(path, local_vars):
-    """In the string `path`, replace tokens like {some.thing} with the
-    corresponding value from the map `local_vars`.
-
-    If there is no corresponding value, leave the token unchanged.
-    """
-    def _replacer(matchobj):
-        name = matchobj.group(1)
-        if name in local_vars:
-            return local_vars[name]
-        elif name in os.environ:
-            return os.environ[name]
-        return matchobj.group(0)
-    return _VAR_REPL.sub(_replacer, path)
-
-
-def _extend_dict(target_dict, other_dict):
-    target_keys = target_dict.keys()
-    for key, value in other_dict.items():
-        if key in target_keys:
-            continue
-        target_dict[key] = value
-
-
-def _expand_vars(scheme, vars):
-    res = {}
-    if vars is None:
-        vars = {}
-    _extend_dict(vars, get_config_vars())
-
-    for key, value in _SCHEMES.items(scheme):
-        if os.name in ('posix', 'nt'):
-            value = os.path.expanduser(value)
-        res[key] = os.path.normpath(_subst_vars(value, vars))
-    return res
-
-
-def format_value(value, vars):
-    def _replacer(matchobj):
-        name = matchobj.group(1)
-        if name in vars:
-            return vars[name]
-        return matchobj.group(0)
-    return _VAR_REPL.sub(_replacer, value)
-
-
-def _get_default_scheme():
-    if os.name == 'posix':
-        # the default scheme for posix is posix_prefix
-        return 'posix_prefix'
-    return os.name
-
-
-def _getuserbase():
-    env_base = os.environ.get("PYTHONUSERBASE", None)
-
-    def joinuser(*args):
-        return os.path.expanduser(os.path.join(*args))
-
-    # what about 'os2emx', 'riscos' ?
-    if os.name == "nt":
-        base = os.environ.get("APPDATA") or "~"
-        if env_base:
-            return env_base
-        else:
-            return joinuser(base, "Python")
-
-    if sys.platform == "darwin":
-        framework = get_config_var("PYTHONFRAMEWORK")
-        if framework:
-            if env_base:
-                return env_base
-            else:
-                return joinuser("~", "Library", framework, "%d.%d" %
-                                sys.version_info[:2])
-
-    if env_base:
-        return env_base
-    else:
-        return joinuser("~", ".local")
-
-
-def _parse_makefile(filename, vars=None):
-    """Parse a Makefile-style file.
-
-    A dictionary containing name/value pairs is returned.  If an
-    optional dictionary is passed in as the second argument, it is
-    used instead of a new dictionary.
-    """
-    # Regexes needed for parsing Makefile (and similar syntaxes,
-    # like old-style Setup files).
-    _variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
-    _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
-    _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
-
-    if vars is None:
-        vars = {}
-    done = {}
-    notdone = {}
-
-    with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f:
-        lines = f.readlines()
-
-    for line in lines:
-        if line.startswith('#') or line.strip() == '':
-            continue
-        m = _variable_rx.match(line)
-        if m:
-            n, v = m.group(1, 2)
-            v = v.strip()
-            # `$$' is a literal `$' in make
-            tmpv = v.replace('$$', '')
-
-            if "$" in tmpv:
-                notdone[n] = v
-            else:
-                try:
-                    v = int(v)
-                except ValueError:
-                    # insert literal `$'
-                    done[n] = v.replace('$$', '$')
-                else:
-                    done[n] = v
-
-    # do variable interpolation here
-    variables = list(notdone.keys())
-
-    # Variables with a 'PY_' prefix in the makefile. These need to
-    # be made available without that prefix through sysconfig.
-    # Special care is needed to ensure that variable expansion works, even
-    # if the expansion uses the name without a prefix.
-    renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')
-
-    while len(variables) > 0:
-        for name in tuple(variables):
-            value = notdone[name]
-            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
-            if m is not None:
-                n = m.group(1)
-                found = True
-                if n in done:
-                    item = str(done[n])
-                elif n in notdone:
-                    # get it on a subsequent round
-                    found = False
-                elif n in os.environ:
-                    # do it like make: fall back to environment
-                    item = os.environ[n]
-
-                elif n in renamed_variables:
-                    if (name.startswith('PY_') and
-                        name[3:] in renamed_variables):
-                        item = ""
-
-                    elif 'PY_' + n in notdone:
-                        found = False
-
-                    else:
-                        item = str(done['PY_' + n])
-
-                else:
-                    done[n] = item = ""
-
-                if found:
-                    after = value[m.end():]
-                    value = value[:m.start()] + item + after
-                    if "$" in after:
-                        notdone[name] = value
-                    else:
-                        try:
-                            value = int(value)
-                        except ValueError:
-                            done[name] = value.strip()
-                        else:
-                            done[name] = value
-                        variables.remove(name)
-
-                        if (name.startswith('PY_') and
-                            name[3:] in renamed_variables):
-
-                            name = name[3:]
-                            if name not in done:
-                                done[name] = value
-
-            else:
-                # bogus variable reference (e.g. "prefix=$/opt/python");
-                # just drop it since we can't deal
-                done[name] = value
-                variables.remove(name)
-
-    # strip spurious spaces
-    for k, v in done.items():
-        if isinstance(v, str):
-            done[k] = v.strip()
-
-    # save the results in the global dictionary
-    vars.update(done)
-    return vars
-
-
-def get_makefile_filename():
-    """Return the path of the Makefile."""
-    if _PYTHON_BUILD:
-        return os.path.join(_PROJECT_BASE, "Makefile")
-    if hasattr(sys, 'abiflags'):
-        config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags)
-    else:
-        config_dir_name = 'config'
-    return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile')
-
-
-def _init_posix(vars):
-    """Initialize the module as appropriate for POSIX systems."""
-    # load the installed Makefile:
-    makefile = get_makefile_filename()
-    try:
-        _parse_makefile(makefile, vars)
-    except IOError as e:
-        msg = "invalid Python installation: unable to open %s" % makefile
-        if hasattr(e, "strerror"):
-            msg = msg + " (%s)" % e.strerror
-        raise IOError(msg)
-    # load the installed pyconfig.h:
-    config_h = get_config_h_filename()
-    try:
-        with open(config_h) as f:
-            parse_config_h(f, vars)
-    except IOError as e:
-        msg = "invalid Python installation: unable to open %s" % config_h
-        if hasattr(e, "strerror"):
-            msg = msg + " (%s)" % e.strerror
-        raise IOError(msg)
-    # On AIX, there are wrong paths to the linker scripts in the Makefile
-    # -- these paths are relative to the Python source, but when installed
-    # the scripts are in another directory.
-    if _PYTHON_BUILD:
-        vars['LDSHARED'] = vars['BLDSHARED']
-
-
-def _init_non_posix(vars):
-    """Initialize the module as appropriate for NT"""
-    # set basic install directories
-    vars['LIBDEST'] = get_path('stdlib')
-    vars['BINLIBDEST'] = get_path('platstdlib')
-    vars['INCLUDEPY'] = get_path('include')
-    vars['SO'] = '.pyd'
-    vars['EXE'] = '.exe'
-    vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT
-    vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable))
-
-#
-# public APIs
-#
-
-
-def parse_config_h(fp, vars=None):
-    """Parse a config.h-style file.
-
-    A dictionary containing name/value pairs is returned.  If an
-    optional dictionary is passed in as the second argument, it is
-    used instead of a new dictionary.
-    """
-    if vars is None:
-        vars = {}
-    define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
-    undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")
-
-    while True:
-        line = fp.readline()
-        if not line:
-            break
-        m = define_rx.match(line)
-        if m:
-            n, v = m.group(1, 2)
-            try:
-                v = int(v)
-            except ValueError:
-                pass
-            vars[n] = v
-        else:
-            m = undef_rx.match(line)
-            if m:
-                vars[m.group(1)] = 0
-    return vars
-
-
-def get_config_h_filename():
-    """Return the path of pyconfig.h."""
-    if _PYTHON_BUILD:
-        if os.name == "nt":
-            inc_dir = os.path.join(_PROJECT_BASE, "PC")
-        else:
-            inc_dir = _PROJECT_BASE
-    else:
-        inc_dir = get_path('platinclude')
-    return os.path.join(inc_dir, 'pyconfig.h')
-
-
-def get_scheme_names():
-    """Return a tuple containing the schemes names."""
-    return tuple(sorted(_SCHEMES.sections()))
-
-
-def get_path_names():
-    """Return a tuple containing the paths names."""
-    # xxx see if we want a static list
-    return _SCHEMES.options('posix_prefix')
-
-
-def get_paths(scheme=_get_default_scheme(), vars=None, expand=True):
-    """Return a mapping containing an install scheme.
-
-    ``scheme`` is the install scheme name. If not provided, it will
-    return the default scheme for the current platform.
-    """
-    _ensure_cfg_read()
-    if expand:
-        return _expand_vars(scheme, vars)
-    else:
-        return dict(_SCHEMES.items(scheme))
-
-
-def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True):
-    """Return a path corresponding to the scheme.
-
-    ``scheme`` is the install scheme name.
-    """
-    return get_paths(scheme, vars, expand)[name]
-
-
-def get_config_vars(*args):
-    """With no arguments, return a dictionary of all configuration
-    variables relevant for the current platform.
-
-    On Unix, this means every variable defined in Python's installed Makefile;
-    On Windows and Mac OS it's a much smaller set.
-
-    With arguments, return a list of values that result from looking up
-    each argument in the configuration variable dictionary.
-    """
-    global _CONFIG_VARS
-    if _CONFIG_VARS is None:
-        _CONFIG_VARS = {}
-        # Normalized versions of prefix and exec_prefix are handy to have;
-        # in fact, these are the standard versions used most places in the
-        # distutils2 module.
-        _CONFIG_VARS['prefix'] = _PREFIX
-        _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX
-        _CONFIG_VARS['py_version'] = _PY_VERSION
-        _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT
-        _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2]
-        _CONFIG_VARS['base'] = _PREFIX
-        _CONFIG_VARS['platbase'] = _EXEC_PREFIX
-        _CONFIG_VARS['projectbase'] = _PROJECT_BASE
-        try:
-            _CONFIG_VARS['abiflags'] = sys.abiflags
-        except AttributeError:
-            # sys.abiflags may not be defined on all platforms.
-            _CONFIG_VARS['abiflags'] = ''
-
-        if os.name in ('nt', 'os2'):
-            _init_non_posix(_CONFIG_VARS)
-        if os.name == 'posix':
-            _init_posix(_CONFIG_VARS)
-        # Setting 'userbase' is done below the call to the
-        # init function to enable using 'get_config_var' in
-        # the init-function.
-        if sys.version >= '2.6':
-            _CONFIG_VARS['userbase'] = _getuserbase()
-
-        if 'srcdir' not in _CONFIG_VARS:
-            _CONFIG_VARS['srcdir'] = _PROJECT_BASE
-        else:
-            _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir'])
-
-        # Convert srcdir into an absolute path if it appears necessary.
-        # Normally it is relative to the build directory.  However, during
-        # testing, for example, we might be running a non-installed python
-        # from a different directory.
-        if _PYTHON_BUILD and os.name == "posix":
-            base = _PROJECT_BASE
-            try:
-                cwd = os.getcwd()
-            except OSError:
-                cwd = None
-            if (not os.path.isabs(_CONFIG_VARS['srcdir']) and
-                base != cwd):
-                # srcdir is relative and we are not in the same directory
-                # as the executable. Assume executable is in the build
-                # directory and make srcdir absolute.
-                srcdir = os.path.join(base, _CONFIG_VARS['srcdir'])
-                _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir)
-
-        if sys.platform == 'darwin':
-            kernel_version = os.uname()[2]  # Kernel version (8.4.3)
-            major_version = int(kernel_version.split('.')[0])
-
-            if major_version < 8:
-                # On Mac OS X before 10.4, check if -arch and -isysroot
-                # are in CFLAGS or LDFLAGS and remove them if they are.
-                # This is needed when building extensions on a 10.3 system
-                # using a universal build of python.
-                for key in ('LDFLAGS', 'BASECFLAGS',
-                        # a number of derived variables. These need to be
-                        # patched up as well.
-                        'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
-                    flags = _CONFIG_VARS[key]
-                    flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
-                    flags = re.sub('-isysroot [^ \t]*', ' ', flags)
-                    _CONFIG_VARS[key] = flags
-            else:
-                # Allow the user to override the architecture flags using
-                # an environment variable.
-                # NOTE: This name was introduced by Apple in OSX 10.5 and
-                # is used by several scripting languages distributed with
-                # that OS release.
-                if 'ARCHFLAGS' in os.environ:
-                    arch = os.environ['ARCHFLAGS']
-                    for key in ('LDFLAGS', 'BASECFLAGS',
-                        # a number of derived variables. These need to be
-                        # patched up as well.
-                        'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
-
-                        flags = _CONFIG_VARS[key]
-                        flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
-                        flags = flags + ' ' + arch
-                        _CONFIG_VARS[key] = flags
-
-                # If we're on OSX 10.5 or later and the user tries to
-                # compiles an extension using an SDK that is not present
-                # on the current machine it is better to not use an SDK
-                # than to fail.
-                #
-                # The major usecase for this is users using a Python.org
-                # binary installer  on OSX 10.6: that installer uses
-                # the 10.4u SDK, but that SDK is not installed by default
-                # when you install Xcode.
-                #
-                CFLAGS = _CONFIG_VARS.get('CFLAGS', '')
-                m = re.search(r'-isysroot\s+(\S+)', CFLAGS)
-                if m is not None:
-                    sdk = m.group(1)
-                    if not os.path.exists(sdk):
-                        for key in ('LDFLAGS', 'BASECFLAGS',
-                             # a number of derived variables. These need to be
-                             # patched up as well.
-                            'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
-
-                            flags = _CONFIG_VARS[key]
-                            flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags)
-                            _CONFIG_VARS[key] = flags
-
-    if args:
-        vals = []
-        for name in args:
-            vals.append(_CONFIG_VARS.get(name))
-        return vals
-    else:
-        return _CONFIG_VARS
-
-
-def get_config_var(name):
-    """Return the value of a single variable using the dictionary returned by
-    'get_config_vars()'.
-
-    Equivalent to get_config_vars().get(name)
-    """
-    return get_config_vars().get(name)
-
-
-def get_platform():
-    """Return a string that identifies the current platform.
-
-    This is used mainly to distinguish platform-specific build directories and
-    platform-specific built distributions.  Typically includes the OS name
-    and version and the architecture (as supplied by 'os.uname()'),
-    although the exact information included depends on the OS; eg. for IRIX
-    the architecture isn't particularly important (IRIX only runs on SGI
-    hardware), but for Linux the kernel version isn't particularly
-    important.
-
-    Examples of returned values:
-       linux-i586
-       linux-alpha (?)
-       solaris-2.6-sun4u
-       irix-5.3
-       irix64-6.2
-
-    Windows will return one of:
-       win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
-       win-ia64 (64bit Windows on Itanium)
-       win32 (all others - specifically, sys.platform is returned)
-
-    For other non-POSIX platforms, currently just returns 'sys.platform'.
-    """
-    if os.name == 'nt':
-        # sniff sys.version for architecture.
-        prefix = " bit ("
-        i = sys.version.find(prefix)
-        if i == -1:
-            return sys.platform
-        j = sys.version.find(")", i)
-        look = sys.version[i+len(prefix):j].lower()
-        if look == 'amd64':
-            return 'win-amd64'
-        if look == 'itanium':
-            return 'win-ia64'
-        return sys.platform
-
-    if os.name != "posix" or not hasattr(os, 'uname'):
-        # XXX what about the architecture? NT is Intel or Alpha,
-        # Mac OS is M68k or PPC, etc.
-        return sys.platform
-
-    # Try to distinguish various flavours of Unix
-    osname, host, release, version, machine = os.uname()
-
-    # Convert the OS name to lowercase, remove '/' characters
-    # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
-    osname = osname.lower().replace('/', '')
-    machine = machine.replace(' ', '_')
-    machine = machine.replace('/', '-')
-
-    if osname[:5] == "linux":
-        # At least on Linux/Intel, 'machine' is the processor --
-        # i386, etc.
-        # XXX what about Alpha, SPARC, etc?
-        return  "%s-%s" % (osname, machine)
-    elif osname[:5] == "sunos":
-        if release[0] >= "5":           # SunOS 5 == Solaris 2
-            osname = "solaris"
-            release = "%d.%s" % (int(release[0]) - 3, release[2:])
-        # fall through to standard osname-release-machine representation
-    elif osname[:4] == "irix":              # could be "irix64"!
-        return "%s-%s" % (osname, release)
-    elif osname[:3] == "aix":
-        return "%s-%s.%s" % (osname, version, release)
-    elif osname[:6] == "cygwin":
-        osname = "cygwin"
-        rel_re = re.compile(r'[\d.]+')
-        m = rel_re.match(release)
-        if m:
-            release = m.group()
-    elif osname[:6] == "darwin":
-        #
-        # For our purposes, we'll assume that the system version from
-        # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
-        # to. This makes the compatibility story a bit more sane because the
-        # machine is going to compile and link as if it were
-        # MACOSX_DEPLOYMENT_TARGET.
-        cfgvars = get_config_vars()
-        macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
-
-        if True:
-            # Always calculate the release of the running machine,
-            # needed to determine if we can build fat binaries or not.
-
-            macrelease = macver
-            # Get the system version. Reading this plist is a documented
-            # way to get the system version (see the documentation for
-            # the Gestalt Manager)
-            try:
-                f = open('/System/Library/CoreServices/SystemVersion.plist')
-            except IOError:
-                # We're on a plain darwin box, fall back to the default
-                # behaviour.
-                pass
-            else:
-                try:
-                    m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
-                                  r'<string>(.*?)</string>', f.read())
-                finally:
-                    f.close()
-                if m is not None:
-                    macrelease = '.'.join(m.group(1).split('.')[:2])
-                # else: fall back to the default behaviour
-
-        if not macver:
-            macver = macrelease
-
-        if macver:
-            release = macver
-            osname = "macosx"
-
-            if ((macrelease + '.') >= '10.4.' and
-                '-arch' in get_config_vars().get('CFLAGS', '').strip()):
-                # The universal build will build fat binaries, but not on
-                # systems before 10.4
-                #
-                # Try to detect 4-way universal builds, those have machine-type
-                # 'universal' instead of 'fat'.
-
-                machine = 'fat'
-                cflags = get_config_vars().get('CFLAGS')
-
-                archs = re.findall(r'-arch\s+(\S+)', cflags)
-                archs = tuple(sorted(set(archs)))
-
-                if len(archs) == 1:
-                    machine = archs[0]
-                elif archs == ('i386', 'ppc'):
-                    machine = 'fat'
-                elif archs == ('i386', 'x86_64'):
-                    machine = 'intel'
-                elif archs == ('i386', 'ppc', 'x86_64'):
-                    machine = 'fat3'
-                elif archs == ('ppc64', 'x86_64'):
-                    machine = 'fat64'
-                elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
-                    machine = 'universal'
-                else:
-                    raise ValueError(
-                       "Don't know machine value for archs=%r" % (archs,))
-
-            elif machine == 'i386':
-                # On OSX the machine type returned by uname is always the
-                # 32-bit variant, even if the executable architecture is
-                # the 64-bit variant
-                if sys.maxsize >= 2**32:
-                    machine = 'x86_64'
-
-            elif machine in ('PowerPC', 'Power_Macintosh'):
-                # Pick a sane name for the PPC architecture.
-                # See 'i386' case
-                if sys.maxsize >= 2**32:
-                    machine = 'ppc64'
-                else:
-                    machine = 'ppc'
-
-    return "%s-%s-%s" % (osname, release, machine)
-
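# Illustrative sketch, not part of the original file: typical strings produced
# by get_platform().  The exact value depends on the host and, on macOS, on
# MACOSX_DEPLOYMENT_TARGET and any -arch flags present in CFLAGS:
#
#     get_platform()  ->  'linux-x86_64'         # common Linux install
#     get_platform()  ->  'macosx-10.6-intel'    # i386 + x86_64 universal build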
-
-def get_python_version():
-    return _PY_VERSION_SHORT
-
-
-def _print_dict(title, data):
-    for index, (key, value) in enumerate(sorted(data.items())):
-        if index == 0:
-            print('%s: ' % (title))
-        print('\t%s = "%s"' % (key, value))
-
-
-def _main():
-    """Display all information sysconfig detains."""
-    print('Platform: "%s"' % get_platform())
-    print('Python version: "%s"' % get_python_version())
-    print('Current installation scheme: "%s"' % _get_default_scheme())
-    print()
-    _print_dict('Paths', get_paths())
-    print()
-    _print_dict('Variables', get_config_vars())
-
-
-if __name__ == '__main__':
-    _main()
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/tarfile.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/tarfile.py
deleted file mode 100644
index d66d856..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/tarfile.py
+++ /dev/null
@@ -1,2607 +0,0 @@
-#-------------------------------------------------------------------
-# tarfile.py
-#-------------------------------------------------------------------
-# Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de>
-# All rights reserved.
-#
-# Permission  is  hereby granted,  free  of charge,  to  any person
-# obtaining a  copy of  this software  and associated documentation
-# files  (the  "Software"),  to   deal  in  the  Software   without
-# restriction,  including  without limitation  the  rights to  use,
-# copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies  of  the  Software,  and to  permit  persons  to  whom the
-# Software  is  furnished  to  do  so,  subject  to  the  following
-# conditions:
-#
-# The above copyright  notice and this  permission notice shall  be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS  IS", WITHOUT WARRANTY OF ANY  KIND,
-# EXPRESS OR IMPLIED, INCLUDING  BUT NOT LIMITED TO  THE WARRANTIES
-# OF  MERCHANTABILITY,  FITNESS   FOR  A  PARTICULAR   PURPOSE  AND
-# NONINFRINGEMENT.  IN  NO  EVENT SHALL  THE  AUTHORS  OR COPYRIGHT
-# HOLDERS  BE LIABLE  FOR ANY  CLAIM, DAMAGES  OR OTHER  LIABILITY,
-# WHETHER  IN AN  ACTION OF  CONTRACT, TORT  OR OTHERWISE,  ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-# OTHER DEALINGS IN THE SOFTWARE.
-#
-from __future__ import print_function
-
-"""Read from and write to tar format archives.
-"""
-
-__version__ = "$Revision$"
-
-version     = "0.9.0"
-__author__  = "Lars Gust\u00e4bel (lars@gustaebel.de)"
-__date__    = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $"
-__cvsid__   = "$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $"
-__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."
-
-#---------
-# Imports
-#---------
-import sys
-import os
-import stat
-import errno
-import time
-import struct
-import copy
-import re
-
-try:
-    import grp, pwd
-except ImportError:
-    grp = pwd = None
-
-# os.symlink on Windows prior to 6.0 raises NotImplementedError
-symlink_exception = (AttributeError, NotImplementedError)
-try:
-    # WindowsError (1314) will be raised if the caller does not hold the
-    # SeCreateSymbolicLinkPrivilege privilege
-    symlink_exception += (WindowsError,)
-except NameError:
-    pass
-
-# from tarfile import *
-__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]
-
-if sys.version_info[0] < 3:
-    import __builtin__ as builtins
-else:
-    import builtins
-
-_open = builtins.open   # Since 'open' is TarFile.open
-
-#---------------------------------------------------------
-# tar constants
-#---------------------------------------------------------
-NUL = b"\0"                     # the null character
-BLOCKSIZE = 512                 # length of processing blocks
-RECORDSIZE = BLOCKSIZE * 20     # length of records
-GNU_MAGIC = b"ustar  \0"        # magic gnu tar string
-POSIX_MAGIC = b"ustar\x0000"    # magic posix tar string
-
-LENGTH_NAME = 100               # maximum length of a filename
-LENGTH_LINK = 100               # maximum length of a linkname
-LENGTH_PREFIX = 155             # maximum length of the prefix field
-
-REGTYPE = b"0"                  # regular file
-AREGTYPE = b"\0"                # regular file
-LNKTYPE = b"1"                  # link (inside tarfile)
-SYMTYPE = b"2"                  # symbolic link
-CHRTYPE = b"3"                  # character special device
-BLKTYPE = b"4"                  # block special device
-DIRTYPE = b"5"                  # directory
-FIFOTYPE = b"6"                 # fifo special device
-CONTTYPE = b"7"                 # contiguous file
-
-GNUTYPE_LONGNAME = b"L"         # GNU tar longname
-GNUTYPE_LONGLINK = b"K"         # GNU tar longlink
-GNUTYPE_SPARSE = b"S"           # GNU tar sparse file
-
-XHDTYPE = b"x"                  # POSIX.1-2001 extended header
-XGLTYPE = b"g"                  # POSIX.1-2001 global header
-SOLARIS_XHDTYPE = b"X"          # Solaris extended header
-
-USTAR_FORMAT = 0                # POSIX.1-1988 (ustar) format
-GNU_FORMAT = 1                  # GNU tar format
-PAX_FORMAT = 2                  # POSIX.1-2001 (pax) format
-DEFAULT_FORMAT = GNU_FORMAT
-
-#---------------------------------------------------------
-# tarfile constants
-#---------------------------------------------------------
-# File types that tarfile supports:
-SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
-                   SYMTYPE, DIRTYPE, FIFOTYPE,
-                   CONTTYPE, CHRTYPE, BLKTYPE,
-                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
-                   GNUTYPE_SPARSE)
-
-# File types that will be treated as a regular file.
-REGULAR_TYPES = (REGTYPE, AREGTYPE,
-                 CONTTYPE, GNUTYPE_SPARSE)
-
-# File types that are part of the GNU tar format.
-GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
-             GNUTYPE_SPARSE)
-
-# Fields from a pax header that override a TarInfo attribute.
-PAX_FIELDS = ("path", "linkpath", "size", "mtime",
-              "uid", "gid", "uname", "gname")
-
-# Fields from a pax header that are affected by hdrcharset.
-PAX_NAME_FIELDS = set(("path", "linkpath", "uname", "gname"))
-
-# Fields in a pax header that are numbers, all other fields
-# are treated as strings.
-PAX_NUMBER_FIELDS = {
-    "atime": float,
-    "ctime": float,
-    "mtime": float,
-    "uid": int,
-    "gid": int,
-    "size": int
-}
-
-#---------------------------------------------------------
-# Bits used in the mode field, values in octal.
-#---------------------------------------------------------
-S_IFLNK = 0o120000        # symbolic link
-S_IFREG = 0o100000        # regular file
-S_IFBLK = 0o060000        # block device
-S_IFDIR = 0o040000        # directory
-S_IFCHR = 0o020000        # character device
-S_IFIFO = 0o010000        # fifo
-
-TSUID   = 0o4000          # set UID on execution
-TSGID   = 0o2000          # set GID on execution
-TSVTX   = 0o1000          # reserved
-
-TUREAD  = 0o400           # read by owner
-TUWRITE = 0o200           # write by owner
-TUEXEC  = 0o100           # execute/search by owner
-TGREAD  = 0o040           # read by group
-TGWRITE = 0o020           # write by group
-TGEXEC  = 0o010           # execute/search by group
-TOREAD  = 0o004           # read by other
-TOWRITE = 0o002           # write by other
-TOEXEC  = 0o001           # execute/search by other
-
-#---------------------------------------------------------
-# initialization
-#---------------------------------------------------------
-if os.name in ("nt", "ce"):
-    ENCODING = "utf-8"
-else:
-    ENCODING = sys.getfilesystemencoding()
-
-#---------------------------------------------------------
-# Some useful functions
-#---------------------------------------------------------
-
-def stn(s, length, encoding, errors):
-    """Convert a string to a null-terminated bytes object.
-    """
-    s = s.encode(encoding, errors)
-    return s[:length] + (length - len(s)) * NUL
-
-def nts(s, encoding, errors):
-    """Convert a null-terminated bytes object to a string.
-    """
-    p = s.find(b"\0")
-    if p != -1:
-        s = s[:p]
-    return s.decode(encoding, errors)
-
-def nti(s):
-    """Convert a number field to a python number.
-    """
-    # There are two possible encodings for a number field, see
-    # itn() below.
-    if s[0] != chr(0o200):
-        try:
-            n = int(nts(s, "ascii", "strict") or "0", 8)
-        except ValueError:
-            raise InvalidHeaderError("invalid header")
-    else:
-        n = 0
-        for i in range(len(s) - 1):
-            n <<= 8
-            n += ord(s[i + 1])
-    return n
-
-def itn(n, digits=8, format=DEFAULT_FORMAT):
-    """Convert a python number to a number field.
-    """
-    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
-    # octal digits followed by a null-byte, this allows values up to
-    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
-    # that if necessary. A leading 0o200 byte indicates this particular
-    # encoding, the following digits-1 bytes are a big-endian
-    # representation. This allows values up to (256**(digits-1))-1.
-    if 0 <= n < 8 ** (digits - 1):
-        s = ("%0*o" % (digits - 1, n)).encode("ascii") + NUL
-    else:
-        if format != GNU_FORMAT or n >= 256 ** (digits - 1):
-            raise ValueError("overflow in number field")
-
-        if n < 0:
-            # XXX We mimic GNU tar's behaviour with negative numbers,
-            # this could raise OverflowError.
-            n = struct.unpack("L", struct.pack("l", n))[0]
-
-        s = bytearray()
-        for i in range(digits - 1):
-            s.insert(0, n & 0o377)
-            n >>= 8
-        s.insert(0, 0o200)
-    return s
-
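# Illustrative sketch, not part of the original module, using the definitions
# above: itn() and nti() round-trip a number field.  Values below
# 8**(digits-1) use the POSIX octal form; larger values use the GNU base-256
# form whose first byte is the 0o200 marker.
assert itn(511) == b"0000777" + NUL      # octal digits, NUL-terminated
assert nti(itn(511)) == 511              # decodes back to the integer
assert itn(1 << 33)[0] == 0o200          # GNU base-256 marker byte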
-def calc_chksums(buf):
-    """Calculate the checksum for a member's header by summing up all
-       characters except for the chksum field which is treated as if
-       it was filled with spaces. According to the GNU tar sources,
-       some tars (Sun and NeXT) calculate chksum with signed char,
-       which will be different if there are chars in the buffer with
-       the high bit set. So we calculate two checksums, unsigned and
-       signed.
-    """
-    unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512]))
-    signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512]))
-    return unsigned_chksum, signed_chksum
-
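# Illustrative sketch, not part of the original module: the 256 added above
# stands for the 8-byte chksum field (offsets 148-155), which is summed as if
# it were blanked out with ASCII spaces; eight spaces sum to 8 * 0x20 == 256.
assert sum(b"        ") == 256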
-def copyfileobj(src, dst, length=None):
-    """Copy length bytes from fileobj src to fileobj dst.
-       If length is None, copy the entire content.
-    """
-    if length == 0:
-        return
-    if length is None:
-        while True:
-            buf = src.read(16*1024)
-            if not buf:
-                break
-            dst.write(buf)
-        return
-
-    BUFSIZE = 16 * 1024
-    blocks, remainder = divmod(length, BUFSIZE)
-    for b in range(blocks):
-        buf = src.read(BUFSIZE)
-        if len(buf) < BUFSIZE:
-            raise IOError("end of file reached")
-        dst.write(buf)
-
-    if remainder != 0:
-        buf = src.read(remainder)
-        if len(buf) < remainder:
-            raise IOError("end of file reached")
-        dst.write(buf)
-    return
-
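# Illustrative sketch, not part of the original module: copyfileobj() moves a
# member's payload between file objects in 16 KiB blocks and raises IOError if
# the source is exhausted before `length` bytes have been copied.
import io as _copy_example_io           # illustration only, not in the original
_src, _dst = _copy_example_io.BytesIO(b"x" * 1000), _copy_example_io.BytesIO()
copyfileobj(_src, _dst, 512)            # copy exactly 512 bytes
assert _dst.getvalue() == b"x" * 512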
-filemode_table = (
-    ((S_IFLNK,      "l"),
-     (S_IFREG,      "-"),
-     (S_IFBLK,      "b"),
-     (S_IFDIR,      "d"),
-     (S_IFCHR,      "c"),
-     (S_IFIFO,      "p")),
-
-    ((TUREAD,       "r"),),
-    ((TUWRITE,      "w"),),
-    ((TUEXEC|TSUID, "s"),
-     (TSUID,        "S"),
-     (TUEXEC,       "x")),
-
-    ((TGREAD,       "r"),),
-    ((TGWRITE,      "w"),),
-    ((TGEXEC|TSGID, "s"),
-     (TSGID,        "S"),
-     (TGEXEC,       "x")),
-
-    ((TOREAD,       "r"),),
-    ((TOWRITE,      "w"),),
-    ((TOEXEC|TSVTX, "t"),
-     (TSVTX,        "T"),
-     (TOEXEC,       "x"))
-)
-
-def filemode(mode):
-    """Convert a file's mode to a string of the form
-       -rwxrwxrwx.
-       Used by TarFile.list()
-    """
-    perm = []
-    for table in filemode_table:
-        for bit, char in table:
-            if mode & bit == bit:
-                perm.append(char)
-                break
-        else:
-            perm.append("-")
-    return "".join(perm)
-
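# Illustrative sketch, not part of the original module: filemode() renders the
# mode bits defined above in ls(1) style, including the file-type character
# taken from the first row of filemode_table.
assert filemode(0o755) == "-rwxr-xr-x"
assert filemode(S_IFDIR | 0o755) == "drwxr-xr-x"
assert filemode(S_IFLNK | 0o777) == "lrwxrwxrwx"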
-class TarError(Exception):
-    """Base exception."""
-    pass
-class ExtractError(TarError):
-    """General exception for extract errors."""
-    pass
-class ReadError(TarError):
-    """Exception for unreadable tar archives."""
-    pass
-class CompressionError(TarError):
-    """Exception for unavailable compression methods."""
-    pass
-class StreamError(TarError):
-    """Exception for unsupported operations on stream-like TarFiles."""
-    pass
-class HeaderError(TarError):
-    """Base exception for header errors."""
-    pass
-class EmptyHeaderError(HeaderError):
-    """Exception for empty headers."""
-    pass
-class TruncatedHeaderError(HeaderError):
-    """Exception for truncated headers."""
-    pass
-class EOFHeaderError(HeaderError):
-    """Exception for end of file headers."""
-    pass
-class InvalidHeaderError(HeaderError):
-    """Exception for invalid headers."""
-    pass
-class SubsequentHeaderError(HeaderError):
-    """Exception for missing and invalid extended headers."""
-    pass
-
-#---------------------------
-# internal stream interface
-#---------------------------
-class _LowLevelFile(object):
-    """Low-level file object. Supports reading and writing.
-       It is used instead of a regular file object for streaming
-       access.
-    """
-
-    def __init__(self, name, mode):
-        mode = {
-            "r": os.O_RDONLY,
-            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
-        }[mode]
-        if hasattr(os, "O_BINARY"):
-            mode |= os.O_BINARY
-        self.fd = os.open(name, mode, 0o666)
-
-    def close(self):
-        os.close(self.fd)
-
-    def read(self, size):
-        return os.read(self.fd, size)
-
-    def write(self, s):
-        os.write(self.fd, s)
-
-class _Stream(object):
-    """Class that serves as an adapter between TarFile and
-       a stream-like object.  The stream-like object only
-       needs to have a read() or write() method and is accessed
-       blockwise.  Use of gzip or bzip2 compression is possible.
-       A stream-like object could be for example: sys.stdin,
-       sys.stdout, a socket, a tape device etc.
-
-       _Stream is intended to be used only internally.
-    """
-
-    def __init__(self, name, mode, comptype, fileobj, bufsize):
-        """Construct a _Stream object.
-        """
-        self._extfileobj = True
-        if fileobj is None:
-            fileobj = _LowLevelFile(name, mode)
-            self._extfileobj = False
-
-        if comptype == '*':
-            # Enable transparent compression detection for the
-            # stream interface
-            fileobj = _StreamProxy(fileobj)
-            comptype = fileobj.getcomptype()
-
-        self.name     = name or ""
-        self.mode     = mode
-        self.comptype = comptype
-        self.fileobj  = fileobj
-        self.bufsize  = bufsize
-        self.buf      = b""
-        self.pos      = 0
-        self.closed   = False
-
-        try:
-            if comptype == "gz":
-                try:
-                    import zlib
-                except ImportError:
-                    raise CompressionError("zlib module is not available")
-                self.zlib = zlib
-                self.crc = zlib.crc32(b"")
-                if mode == "r":
-                    self._init_read_gz()
-                else:
-                    self._init_write_gz()
-
-            if comptype == "bz2":
-                try:
-                    import bz2
-                except ImportError:
-                    raise CompressionError("bz2 module is not available")
-                if mode == "r":
-                    self.dbuf = b""
-                    self.cmp = bz2.BZ2Decompressor()
-                else:
-                    self.cmp = bz2.BZ2Compressor()
-        except:
-            if not self._extfileobj:
-                self.fileobj.close()
-            self.closed = True
-            raise
-
-    def __del__(self):
-        if hasattr(self, "closed") and not self.closed:
-            self.close()
-
-    def _init_write_gz(self):
-        """Initialize for writing with gzip compression.
-        """
-        self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
-                                            -self.zlib.MAX_WBITS,
-                                            self.zlib.DEF_MEM_LEVEL,
-                                            0)
-        timestamp = struct.pack("<L", int(time.time()))
-        self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
-        if self.name.endswith(".gz"):
-            self.name = self.name[:-3]
-        # RFC1952 says we must use ISO-8859-1 for the FNAME field.
-        self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
-
-    def write(self, s):
-        """Write string s to the stream.
-        """
-        if self.comptype == "gz":
-            self.crc = self.zlib.crc32(s, self.crc)
-        self.pos += len(s)
-        if self.comptype != "tar":
-            s = self.cmp.compress(s)
-        self.__write(s)
-
-    def __write(self, s):
-        """Write string s to the stream if a whole new block
-           is ready to be written.
-        """
-        self.buf += s
-        while len(self.buf) > self.bufsize:
-            self.fileobj.write(self.buf[:self.bufsize])
-            self.buf = self.buf[self.bufsize:]
-
-    def close(self):
-        """Close the _Stream object. No operation should be
-           done on it afterwards.
-        """
-        if self.closed:
-            return
-
-        if self.mode == "w" and self.comptype != "tar":
-            self.buf += self.cmp.flush()
-
-        if self.mode == "w" and self.buf:
-            self.fileobj.write(self.buf)
-            self.buf = b""
-            if self.comptype == "gz":
-                # The native zlib crc is an unsigned 32-bit integer, but
-                # the Python wrapper implicitly casts that to a signed C
-                # long.  So, on a 32-bit box self.crc may "look negative",
-                # while the same crc on a 64-bit box may "look positive".
-                # To avoid irksome warnings from the `struct` module, force
-                # it to look positive on all boxes.
-                self.fileobj.write(struct.pack("<L", self.crc & 0xffffffff))
-                self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
-
-        if not self._extfileobj:
-            self.fileobj.close()
-
-        self.closed = True
-
-    def _init_read_gz(self):
-        """Initialize for reading a gzip compressed fileobj.
-        """
-        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
-        self.dbuf = b""
-
-        # taken from gzip.GzipFile with some alterations
-        if self.__read(2) != b"\037\213":
-            raise ReadError("not a gzip file")
-        if self.__read(1) != b"\010":
-            raise CompressionError("unsupported compression method")
-
-        flag = ord(self.__read(1))
-        self.__read(6)
-
-        if flag & 4:
-            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
-            self.read(xlen)
-        if flag & 8:
-            while True:
-                s = self.__read(1)
-                if not s or s == NUL:
-                    break
-        if flag & 16:
-            while True:
-                s = self.__read(1)
-                if not s or s == NUL:
-                    break
-        if flag & 2:
-            self.__read(2)
-
-    def tell(self):
-        """Return the stream's file pointer position.
-        """
-        return self.pos
-
-    def seek(self, pos=0):
-        """Set the stream's file pointer to pos. Negative seeking
-           is forbidden.
-        """
-        if pos - self.pos >= 0:
-            blocks, remainder = divmod(pos - self.pos, self.bufsize)
-            for i in range(blocks):
-                self.read(self.bufsize)
-            self.read(remainder)
-        else:
-            raise StreamError("seeking backwards is not allowed")
-        return self.pos
-
-    def read(self, size=None):
-        """Return the next size number of bytes from the stream.
-           If size is not defined, return all bytes of the stream
-           up to EOF.
-        """
-        if size is None:
-            t = []
-            while True:
-                buf = self._read(self.bufsize)
-                if not buf:
-                    break
-                t.append(buf)
-            buf = b"".join(t)
-        else:
-            buf = self._read(size)
-        self.pos += len(buf)
-        return buf
-
-    def _read(self, size):
-        """Return size bytes from the stream.
-        """
-        if self.comptype == "tar":
-            return self.__read(size)
-
-        c = len(self.dbuf)
-        while c < size:
-            buf = self.__read(self.bufsize)
-            if not buf:
-                break
-            try:
-                buf = self.cmp.decompress(buf)
-            except IOError:
-                raise ReadError("invalid compressed data")
-            self.dbuf += buf
-            c += len(buf)
-        buf = self.dbuf[:size]
-        self.dbuf = self.dbuf[size:]
-        return buf
-
-    def __read(self, size):
-        """Return size bytes from stream. If internal buffer is empty,
-           read another block from the stream.
-        """
-        c = len(self.buf)
-        while c < size:
-            buf = self.fileobj.read(self.bufsize)
-            if not buf:
-                break
-            self.buf += buf
-            c += len(buf)
-        buf = self.buf[:size]
-        self.buf = self.buf[size:]
-        return buf
-# class _Stream
-
-class _StreamProxy(object):
-    """Small proxy class that enables transparent compression
-       detection for the Stream interface (mode 'r|*').
-    """
-
-    def __init__(self, fileobj):
-        self.fileobj = fileobj
-        self.buf = self.fileobj.read(BLOCKSIZE)
-
-    def read(self, size):
-        self.read = self.fileobj.read
-        return self.buf
-
-    def getcomptype(self):
-        if self.buf.startswith(b"\037\213\010"):
-            return "gz"
-        if self.buf.startswith(b"BZh91"):
-            return "bz2"
-        return "tar"
-
-    def close(self):
-        self.fileobj.close()
-# class StreamProxy
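# Illustrative sketch, not part of the original module: the 'r|*' transparent
# detection above keys off well-known magic bytes at the start of the stream.
import io as _magic_example_io          # illustration only, not in the original
assert _StreamProxy(_magic_example_io.BytesIO(b"\037\213\010" + NUL * 509)).getcomptype() == "gz"
assert _StreamProxy(_magic_example_io.BytesIO(b"BZh91AY&SY" + NUL * 502)).getcomptype() == "bz2"
assert _StreamProxy(_magic_example_io.BytesIO(NUL * BLOCKSIZE)).getcomptype() == "tar"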
-
-class _BZ2Proxy(object):
-    """Small proxy class that enables external file object
-       support for "r:bz2" and "w:bz2" modes. This is actually
-       a workaround for a limitation in bz2 module's BZ2File
-       class which (unlike gzip.GzipFile) has no support for
-       a file object argument.
-    """
-
-    blocksize = 16 * 1024
-
-    def __init__(self, fileobj, mode):
-        self.fileobj = fileobj
-        self.mode = mode
-        self.name = getattr(self.fileobj, "name", None)
-        self.init()
-
-    def init(self):
-        import bz2
-        self.pos = 0
-        if self.mode == "r":
-            self.bz2obj = bz2.BZ2Decompressor()
-            self.fileobj.seek(0)
-            self.buf = b""
-        else:
-            self.bz2obj = bz2.BZ2Compressor()
-
-    def read(self, size):
-        x = len(self.buf)
-        while x < size:
-            raw = self.fileobj.read(self.blocksize)
-            if not raw:
-                break
-            data = self.bz2obj.decompress(raw)
-            self.buf += data
-            x += len(data)
-
-        buf = self.buf[:size]
-        self.buf = self.buf[size:]
-        self.pos += len(buf)
-        return buf
-
-    def seek(self, pos):
-        if pos < self.pos:
-            self.init()
-        self.read(pos - self.pos)
-
-    def tell(self):
-        return self.pos
-
-    def write(self, data):
-        self.pos += len(data)
-        raw = self.bz2obj.compress(data)
-        self.fileobj.write(raw)
-
-    def close(self):
-        if self.mode == "w":
-            raw = self.bz2obj.flush()
-            self.fileobj.write(raw)
-# class _BZ2Proxy
-
-#------------------------
-# Extraction file object
-#------------------------
-class _FileInFile(object):
-    """A thin wrapper around an existing file object that
-       provides a part of its data as an individual file
-       object.
-    """
-
-    def __init__(self, fileobj, offset, size, blockinfo=None):
-        self.fileobj = fileobj
-        self.offset = offset
-        self.size = size
-        self.position = 0
-
-        if blockinfo is None:
-            blockinfo = [(0, size)]
-
-        # Construct a map with data and zero blocks.
-        self.map_index = 0
-        self.map = []
-        lastpos = 0
-        realpos = self.offset
-        for offset, size in blockinfo:
-            if offset > lastpos:
-                self.map.append((False, lastpos, offset, None))
-            self.map.append((True, offset, offset + size, realpos))
-            realpos += size
-            lastpos = offset + size
-        if lastpos < self.size:
-            self.map.append((False, lastpos, self.size, None))
-
-    def seekable(self):
-        if not hasattr(self.fileobj, "seekable"):
-            # XXX gzip.GzipFile and bz2.BZ2File
-            return True
-        return self.fileobj.seekable()
-
-    def tell(self):
-        """Return the current file position.
-        """
-        return self.position
-
-    def seek(self, position):
-        """Seek to a position in the file.
-        """
-        self.position = position
-
-    def read(self, size=None):
-        """Read data from the file.
-        """
-        if size is None:
-            size = self.size - self.position
-        else:
-            size = min(size, self.size - self.position)
-
-        buf = b""
-        while size > 0:
-            while True:
-                data, start, stop, offset = self.map[self.map_index]
-                if start <= self.position < stop:
-                    break
-                else:
-                    self.map_index += 1
-                    if self.map_index == len(self.map):
-                        self.map_index = 0
-            length = min(size, stop - self.position)
-            if data:
-                self.fileobj.seek(offset + (self.position - start))
-                buf += self.fileobj.read(length)
-            else:
-                buf += NUL * length
-            size -= length
-            self.position += length
-        return buf
-#class _FileInFile
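# Illustrative sketch, not part of the original module: the data/hole map that
# _FileInFile.__init__ builds for a sparse member of size 15 whose data blocks
# are blockinfo=[(0, 3), (10, 2)], stored at archive offset 1000, would be
#
#     [(True,   0,  3, 1000),   # bytes 0-2   read from the archive
#      (False,  3, 10, None),   # bytes 3-9   are a hole, returned as NULs
#      (True,  10, 12, 1003),   # bytes 10-11 read from the archive
#      (False, 12, 15, None)]   # bytes 12-14 are a hole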
-
-
-class ExFileObject(object):
-    """File-like object for reading an archive member.
-       Is returned by TarFile.extractfile().
-    """
-    blocksize = 1024
-
-    def __init__(self, tarfile, tarinfo):
-        self.fileobj = _FileInFile(tarfile.fileobj,
-                                   tarinfo.offset_data,
-                                   tarinfo.size,
-                                   tarinfo.sparse)
-        self.name = tarinfo.name
-        self.mode = "r"
-        self.closed = False
-        self.size = tarinfo.size
-
-        self.position = 0
-        self.buffer = b""
-
-    def readable(self):
-        return True
-
-    def writable(self):
-        return False
-
-    def seekable(self):
-        return self.fileobj.seekable()
-
-    def read(self, size=None):
-        """Read at most size bytes from the file. If size is not
-           present or None, read all data until EOF is reached.
-        """
-        if self.closed:
-            raise ValueError("I/O operation on closed file")
-
-        buf = b""
-        if self.buffer:
-            if size is None:
-                buf = self.buffer
-                self.buffer = b""
-            else:
-                buf = self.buffer[:size]
-                self.buffer = self.buffer[size:]
-
-        if size is None:
-            buf += self.fileobj.read()
-        else:
-            buf += self.fileobj.read(size - len(buf))
-
-        self.position += len(buf)
-        return buf
-
-    # XXX TextIOWrapper uses the read1() method.
-    read1 = read
-
-    def readline(self, size=-1):
-        """Read one entire line from the file. If size is present
-           and non-negative, return a string with at most that
-           size, which may be an incomplete line.
-        """
-        if self.closed:
-            raise ValueError("I/O operation on closed file")
-
-        pos = self.buffer.find(b"\n") + 1
-        if pos == 0:
-            # no newline found.
-            while True:
-                buf = self.fileobj.read(self.blocksize)
-                self.buffer += buf
-                if not buf or b"\n" in buf:
-                    pos = self.buffer.find(b"\n") + 1
-                    if pos == 0:
-                        # no newline found.
-                        pos = len(self.buffer)
-                    break
-
-        if size != -1:
-            pos = min(size, pos)
-
-        buf = self.buffer[:pos]
-        self.buffer = self.buffer[pos:]
-        self.position += len(buf)
-        return buf
-
-    def readlines(self):
-        """Return a list with all remaining lines.
-        """
-        result = []
-        while True:
-            line = self.readline()
-            if not line: break
-            result.append(line)
-        return result
-
-    def tell(self):
-        """Return the current file position.
-        """
-        if self.closed:
-            raise ValueError("I/O operation on closed file")
-
-        return self.position
-
-    def seek(self, pos, whence=os.SEEK_SET):
-        """Seek to a position in the file.
-        """
-        if self.closed:
-            raise ValueError("I/O operation on closed file")
-
-        if whence == os.SEEK_SET:
-            self.position = min(max(pos, 0), self.size)
-        elif whence == os.SEEK_CUR:
-            if pos < 0:
-                self.position = max(self.position + pos, 0)
-            else:
-                self.position = min(self.position + pos, self.size)
-        elif whence == os.SEEK_END:
-            self.position = max(min(self.size + pos, self.size), 0)
-        else:
-            raise ValueError("Invalid argument")
-
-        self.buffer = b""
-        self.fileobj.seek(self.position)
-
-    def close(self):
-        """Close the file object.
-        """
-        self.closed = True
-
-    def __iter__(self):
-        """Get an iterator over the file's lines.
-        """
-        while True:
-            line = self.readline()
-            if not line:
-                break
-            yield line
-#class ExFileObject
-
-#------------------
-# Exported Classes
-#------------------
-class TarInfo(object):
-    """Informational class which holds the details about an
-       archive member given by a tar header block.
-       TarInfo objects are returned by TarFile.getmember(),
-       TarFile.getmembers() and TarFile.gettarinfo() and are
-       usually created internally.
-    """
-
-    __slots__ = ("name", "mode", "uid", "gid", "size", "mtime",
-                 "chksum", "type", "linkname", "uname", "gname",
-                 "devmajor", "devminor",
-                 "offset", "offset_data", "pax_headers", "sparse",
-                 "tarfile", "_sparse_structs", "_link_target")
-
-    def __init__(self, name=""):
-        """Construct a TarInfo object. name is the optional name
-           of the member.
-        """
-        self.name = name        # member name
-        self.mode = 0o644       # file permissions
-        self.uid = 0            # user id
-        self.gid = 0            # group id
-        self.size = 0           # file size
-        self.mtime = 0          # modification time
-        self.chksum = 0         # header checksum
-        self.type = REGTYPE     # member type
-        self.linkname = ""      # link name
-        self.uname = ""         # user name
-        self.gname = ""         # group name
-        self.devmajor = 0       # device major number
-        self.devminor = 0       # device minor number
-
-        self.offset = 0         # the tar header starts here
-        self.offset_data = 0    # the file's data starts here
-
-        self.sparse = None      # sparse member information
-        self.pax_headers = {}   # pax header information
-
-    # In pax headers the "name" and "linkname" field are called
-    # "path" and "linkpath".
-    def _getpath(self):
-        return self.name
-    def _setpath(self, name):
-        self.name = name
-    path = property(_getpath, _setpath)
-
-    def _getlinkpath(self):
-        return self.linkname
-    def _setlinkpath(self, linkname):
-        self.linkname = linkname
-    linkpath = property(_getlinkpath, _setlinkpath)
-
-    def __repr__(self):
-        return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
-
-    def get_info(self):
-        """Return the TarInfo's attributes as a dictionary.
-        """
-        info = {
-            "name":     self.name,
-            "mode":     self.mode & 0o7777,
-            "uid":      self.uid,
-            "gid":      self.gid,
-            "size":     self.size,
-            "mtime":    self.mtime,
-            "chksum":   self.chksum,
-            "type":     self.type,
-            "linkname": self.linkname,
-            "uname":    self.uname,
-            "gname":    self.gname,
-            "devmajor": self.devmajor,
-            "devminor": self.devminor
-        }
-
-        if info["type"] == DIRTYPE and not info["name"].endswith("/"):
-            info["name"] += "/"
-
-        return info
-
-    def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
-        """Return a tar header as a string of 512 byte blocks.
-        """
-        info = self.get_info()
-
-        if format == USTAR_FORMAT:
-            return self.create_ustar_header(info, encoding, errors)
-        elif format == GNU_FORMAT:
-            return self.create_gnu_header(info, encoding, errors)
-        elif format == PAX_FORMAT:
-            return self.create_pax_header(info, encoding)
-        else:
-            raise ValueError("invalid format")
-
-    def create_ustar_header(self, info, encoding, errors):
-        """Return the object as a ustar header block.
-        """
-        info["magic"] = POSIX_MAGIC
-
-        if len(info["linkname"]) > LENGTH_LINK:
-            raise ValueError("linkname is too long")
-
-        if len(info["name"]) > LENGTH_NAME:
-            info["prefix"], info["name"] = self._posix_split_name(info["name"])
-
-        return self._create_header(info, USTAR_FORMAT, encoding, errors)
-
-    def create_gnu_header(self, info, encoding, errors):
-        """Return the object as a GNU header block sequence.
-        """
-        info["magic"] = GNU_MAGIC
-
-        buf = b""
-        if len(info["linkname"]) > LENGTH_LINK:
-            buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)
-
-        if len(info["name"]) > LENGTH_NAME:
-            buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)
-
-        return buf + self._create_header(info, GNU_FORMAT, encoding, errors)
-
-    def create_pax_header(self, info, encoding):
-        """Return the object as a ustar header block. If it cannot be
-           represented this way, prepend a pax extended header sequence
-           with supplemental information.
-        """
-        info["magic"] = POSIX_MAGIC
-        pax_headers = self.pax_headers.copy()
-
-        # Test string fields for values that exceed the field length or cannot
-        # be represented in ASCII encoding.
-        for name, hname, length in (
-                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
-                ("uname", "uname", 32), ("gname", "gname", 32)):
-
-            if hname in pax_headers:
-                # The pax header has priority.
-                continue
-
-            # Try to encode the string as ASCII.
-            try:
-                info[name].encode("ascii", "strict")
-            except UnicodeEncodeError:
-                pax_headers[hname] = info[name]
-                continue
-
-            if len(info[name]) > length:
-                pax_headers[hname] = info[name]
-
-        # Test number fields for values that exceed the field limit or values
-        # that have to be stored as float.
-        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
-            if name in pax_headers:
-                # The pax header has priority. Avoid overflow.
-                info[name] = 0
-                continue
-
-            val = info[name]
-            if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
-                pax_headers[name] = str(val)
-                info[name] = 0
-
-        # Create a pax extended header if necessary.
-        if pax_headers:
-            buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
-        else:
-            buf = b""
-
-        return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")
-
-    @classmethod
-    def create_pax_global_header(cls, pax_headers):
-        """Return the object as a pax global header block sequence.
-        """
-        return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf8")
-
-    def _posix_split_name(self, name):
-        """Split a name longer than 100 chars into a prefix
-           and a name part.
-        """
-        prefix = name[:LENGTH_PREFIX + 1]
-        while prefix and prefix[-1] != "/":
-            prefix = prefix[:-1]
-
-        name = name[len(prefix):]
-        prefix = prefix[:-1]
-
-        if not prefix or len(name) > LENGTH_NAME:
-            raise ValueError("name is too long")
-        return prefix, name
-
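# Illustrative sketch, not part of the original module: _posix_split_name()
# moves leading directories into the 155-byte ustar prefix field so that the
# remaining part fits the 100-byte name field, e.g. (run at module level):
#
#     TarInfo()._posix_split_name("d" * 80 + "/" + "f" * 60)
#         ->  ("d" * 80, "f" * 60)      # (prefix field, name field)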
-    @staticmethod
-    def _create_header(info, format, encoding, errors):
-        """Return a header block. info is a dictionary with file
-           information, format must be one of the *_FORMAT constants.
-        """
-        parts = [
-            stn(info.get("name", ""), 100, encoding, errors),
-            itn(info.get("mode", 0) & 0o7777, 8, format),
-            itn(info.get("uid", 0), 8, format),
-            itn(info.get("gid", 0), 8, format),
-            itn(info.get("size", 0), 12, format),
-            itn(info.get("mtime", 0), 12, format),
-            b"        ", # checksum field
-            info.get("type", REGTYPE),
-            stn(info.get("linkname", ""), 100, encoding, errors),
-            info.get("magic", POSIX_MAGIC),
-            stn(info.get("uname", ""), 32, encoding, errors),
-            stn(info.get("gname", ""), 32, encoding, errors),
-            itn(info.get("devmajor", 0), 8, format),
-            itn(info.get("devminor", 0), 8, format),
-            stn(info.get("prefix", ""), 155, encoding, errors)
-        ]
-
-        buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
-        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
-        buf = buf[:-364] + ("%06o\0" % chksum).encode("ascii") + buf[-357:]
-        return buf
-
-    @staticmethod
-    def _create_payload(payload):
-        """Return the string payload filled with zero bytes
-           up to the next 512 byte border.
-        """
-        blocks, remainder = divmod(len(payload), BLOCKSIZE)
-        if remainder > 0:
-            payload += (BLOCKSIZE - remainder) * NUL
-        return payload
-
-    @classmethod
-    def _create_gnu_long_header(cls, name, type, encoding, errors):
-        """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
-           for name.
-        """
-        name = name.encode(encoding, errors) + NUL
-
-        info = {}
-        info["name"] = "././@LongLink"
-        info["type"] = type
-        info["size"] = len(name)
-        info["magic"] = GNU_MAGIC
-
-        # create extended header + name blocks.
-        return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
-                cls._create_payload(name)
-
-    @classmethod
-    def _create_pax_generic_header(cls, pax_headers, type, encoding):
-        """Return a POSIX.1-2008 extended or global header sequence
-           that contains a list of keyword, value pairs. The values
-           must be strings.
-        """
-        # Check if one of the fields contains surrogate characters and thereby
-        # forces hdrcharset=BINARY, see _proc_pax() for more information.
-        binary = False
-        for keyword, value in pax_headers.items():
-            try:
-                value.encode("utf8", "strict")
-            except UnicodeEncodeError:
-                binary = True
-                break
-
-        records = b""
-        if binary:
-            # Put the hdrcharset field at the beginning of the header.
-            records += b"21 hdrcharset=BINARY\n"
-
-        for keyword, value in pax_headers.items():
-            keyword = keyword.encode("utf8")
-            if binary:
-                # Try to restore the original byte representation of `value'.
-                # Needless to say, the encoding must match the string.
-                value = value.encode(encoding, "surrogateescape")
-            else:
-                value = value.encode("utf8")
-
-            l = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
-            n = p = 0
-            while True:
-                n = l + len(str(p))
-                if n == p:
-                    break
-                p = n
-            records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"
-
-        # We use a hardcoded "././@PaxHeader" name like star does
-        # instead of the one that POSIX recommends.
-        info = {}
-        info["name"] = "././@PaxHeader"
-        info["type"] = type
-        info["size"] = len(records)
-        info["magic"] = POSIX_MAGIC
-
-        # Create pax header + record blocks.
-        return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
-                cls._create_payload(records)
-
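# Illustrative sketch, not part of the original module: the fixed-point loop
# above sizes each "%d %s=%s\n" record including its own length field.  For
# keyword b"path" and a 10-byte value, l = 4 + 10 + 3 = 17 and the loop settles
# on p = 19, producing the 19-byte record b"19 path=xxxxxxxxxx\n".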
-    @classmethod
-    def frombuf(cls, buf, encoding, errors):
-        """Construct a TarInfo object from a 512 byte bytes object.
-        """
-        if len(buf) == 0:
-            raise EmptyHeaderError("empty header")
-        if len(buf) != BLOCKSIZE:
-            raise TruncatedHeaderError("truncated header")
-        if buf.count(NUL) == BLOCKSIZE:
-            raise EOFHeaderError("end of file header")
-
-        chksum = nti(buf[148:156])
-        if chksum not in calc_chksums(buf):
-            raise InvalidHeaderError("bad checksum")
-
-        obj = cls()
-        obj.name = nts(buf[0:100], encoding, errors)
-        obj.mode = nti(buf[100:108])
-        obj.uid = nti(buf[108:116])
-        obj.gid = nti(buf[116:124])
-        obj.size = nti(buf[124:136])
-        obj.mtime = nti(buf[136:148])
-        obj.chksum = chksum
-        obj.type = buf[156:157]
-        obj.linkname = nts(buf[157:257], encoding, errors)
-        obj.uname = nts(buf[265:297], encoding, errors)
-        obj.gname = nts(buf[297:329], encoding, errors)
-        obj.devmajor = nti(buf[329:337])
-        obj.devminor = nti(buf[337:345])
-        prefix = nts(buf[345:500], encoding, errors)
-
-        # Old V7 tar format represents a directory as a regular
-        # file with a trailing slash.
-        if obj.type == AREGTYPE and obj.name.endswith("/"):
-            obj.type = DIRTYPE
-
-        # The old GNU sparse format occupies some of the unused
-        # space in the buffer for up to 4 sparse structures.
-        # Save them for later processing in _proc_sparse().
-        if obj.type == GNUTYPE_SPARSE:
-            pos = 386
-            structs = []
-            for i in range(4):
-                try:
-                    offset = nti(buf[pos:pos + 12])
-                    numbytes = nti(buf[pos + 12:pos + 24])
-                except ValueError:
-                    break
-                structs.append((offset, numbytes))
-                pos += 24
-            isextended = bool(buf[482])
-            origsize = nti(buf[483:495])
-            obj._sparse_structs = (structs, isextended, origsize)
-
-        # Remove redundant slashes from directories.
-        if obj.isdir():
-            obj.name = obj.name.rstrip("/")
-
-        # Reconstruct a ustar longname.
-        if prefix and obj.type not in GNU_TYPES:
-            obj.name = prefix + "/" + obj.name
-        return obj
-
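# Illustrative sketch, not part of the original module: for a simple member,
# tobuf() and frombuf() are inverses (run at module level, after the class):
#
#     block = TarInfo("hello.txt").tobuf()      # one 512-byte header block
#     assert len(block) == BLOCKSIZE
#     assert TarInfo.frombuf(block, ENCODING, "surrogateescape").name == "hello.txt"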
-    @classmethod
-    def fromtarfile(cls, tarfile):
-        """Return the next TarInfo object from TarFile object
-           tarfile.
-        """
-        buf = tarfile.fileobj.read(BLOCKSIZE)
-        obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
-        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
-        return obj._proc_member(tarfile)
-
-    #--------------------------------------------------------------------------
-    # The following are methods that are called depending on the type of a
-    # member. The entry point is _proc_member() which can be overridden in a
-    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
-    # implement the following
-    # operations:
-    # 1. Set self.offset_data to the position where the data blocks begin,
-    #    if there is data that follows.
-    # 2. Set tarfile.offset to the position where the next member's header will
-    #    begin.
-    # 3. Return self or another valid TarInfo object.
-    def _proc_member(self, tarfile):
-        """Choose the right processing method depending on
-           the type and call it.
-        """
-        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
-            return self._proc_gnulong(tarfile)
-        elif self.type == GNUTYPE_SPARSE:
-            return self._proc_sparse(tarfile)
-        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
-            return self._proc_pax(tarfile)
-        else:
-            return self._proc_builtin(tarfile)
-
-    def _proc_builtin(self, tarfile):
-        """Process a builtin type or an unknown type which
-           will be treated as a regular file.
-        """
-        self.offset_data = tarfile.fileobj.tell()
-        offset = self.offset_data
-        if self.isreg() or self.type not in SUPPORTED_TYPES:
-            # Skip the following data blocks.
-            offset += self._block(self.size)
-        tarfile.offset = offset
-
-        # Patch the TarInfo object with saved global
-        # header information.
-        self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
-
-        return self
-
-    def _proc_gnulong(self, tarfile):
-        """Process the blocks that hold a GNU longname
-           or longlink member.
-        """
-        buf = tarfile.fileobj.read(self._block(self.size))
-
-        # Fetch the next header and process it.
-        try:
-            next = self.fromtarfile(tarfile)
-        except HeaderError:
-            raise SubsequentHeaderError("missing or bad subsequent header")
-
-        # Patch the TarInfo object from the next header with
-        # the longname information.
-        next.offset = self.offset
-        if self.type == GNUTYPE_LONGNAME:
-            next.name = nts(buf, tarfile.encoding, tarfile.errors)
-        elif self.type == GNUTYPE_LONGLINK:
-            next.linkname = nts(buf, tarfile.encoding, tarfile.errors)
-
-        return next
-
-    def _proc_sparse(self, tarfile):
-        """Process a GNU sparse header plus extra headers.
-        """
-        # We already collected some sparse structures in frombuf().
-        structs, isextended, origsize = self._sparse_structs
-        del self._sparse_structs
-
-        # Collect sparse structures from extended header blocks.
-        while isextended:
-            buf = tarfile.fileobj.read(BLOCKSIZE)
-            pos = 0
-            for i in range(21):
-                try:
-                    offset = nti(buf[pos:pos + 12])
-                    numbytes = nti(buf[pos + 12:pos + 24])
-                except ValueError:
-                    break
-                if offset and numbytes:
-                    structs.append((offset, numbytes))
-                pos += 24
-            isextended = bool(buf[504])
-        self.sparse = structs
-
-        self.offset_data = tarfile.fileobj.tell()
-        tarfile.offset = self.offset_data + self._block(self.size)
-        self.size = origsize
-        return self
-
-    def _proc_pax(self, tarfile):
-        """Process an extended or global header as described in
-           POSIX.1-2008.
-        """
-        # Read the header information.
-        buf = tarfile.fileobj.read(self._block(self.size))
-
-        # A pax header stores supplemental information for either
-        # the following file (extended) or all following files
-        # (global).
-        if self.type == XGLTYPE:
-            pax_headers = tarfile.pax_headers
-        else:
-            pax_headers = tarfile.pax_headers.copy()
-
-        # Check if the pax header contains a hdrcharset field. This tells us
-        # the encoding of the path, linkpath, uname and gname fields. Normally,
-        # these fields are UTF-8 encoded but, since POSIX.1-2008, tar
-        # implementations are allowed to store them as raw binary strings if
-        # the translation to UTF-8 fails.
-        match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
-        if match is not None:
-            pax_headers["hdrcharset"] = match.group(1).decode("utf8")
-
-        # For the time being, we don't care about anything other than "BINARY".
-        # The only other value that is currently allowed by the standard is
-        # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
-        hdrcharset = pax_headers.get("hdrcharset")
-        if hdrcharset == "BINARY":
-            encoding = tarfile.encoding
-        else:
-            encoding = "utf8"
-
-        # Parse pax header information. A record looks like that:
-        # "%d %s=%s\n" % (length, keyword, value). length is the size
-        # of the complete record including the length field itself and
-        # the newline. keyword and value are both UTF-8 encoded strings.
-        regex = re.compile(br"(\d+) ([^=]+)=")
-        pos = 0
-        while True:
-            match = regex.match(buf, pos)
-            if not match:
-                break
-
-            length, keyword = match.groups()
-            length = int(length)
-            value = buf[match.end(2) + 1:match.start(1) + length - 1]
-
-            # Normally, we could just use "utf8" as the encoding and "strict"
-            # as the error handler, but we better not take the risk. For
-            # example, GNU tar <= 1.23 is known to store filenames it cannot
-            # translate to UTF-8 as raw strings (unfortunately without a
-            # hdrcharset=BINARY header).
-            # We first try the strict standard encoding, and if that fails we
-            # fall back on the user's encoding and error handler.
-            keyword = self._decode_pax_field(keyword, "utf8", "utf8",
-                    tarfile.errors)
-            if keyword in PAX_NAME_FIELDS:
-                value = self._decode_pax_field(value, encoding, tarfile.encoding,
-                        tarfile.errors)
-            else:
-                value = self._decode_pax_field(value, "utf8", "utf8",
-                        tarfile.errors)
-
-            pax_headers[keyword] = value
-            pos += length
-
-        # Fetch the next header.
-        try:
-            next = self.fromtarfile(tarfile)
-        except HeaderError:
-            raise SubsequentHeaderError("missing or bad subsequent header")
-
-        # Process GNU sparse information.
-        if "GNU.sparse.map" in pax_headers:
-            # GNU extended sparse format version 0.1.
-            self._proc_gnusparse_01(next, pax_headers)
-
-        elif "GNU.sparse.size" in pax_headers:
-            # GNU extended sparse format version 0.0.
-            self._proc_gnusparse_00(next, pax_headers, buf)
-
-        elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
-            # GNU extended sparse format version 1.0.
-            self._proc_gnusparse_10(next, pax_headers, tarfile)
-
-        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
-            # Patch the TarInfo object with the extended header info.
-            next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
-            next.offset = self.offset
-
-            if "size" in pax_headers:
-                # If the extended header replaces the size field,
-                # we need to recalculate the offset where the next
-                # header starts.
-                offset = next.offset_data
-                if next.isreg() or next.type not in SUPPORTED_TYPES:
-                    offset += next._block(next.size)
-                tarfile.offset = offset
-
-        return next
-
-    def _proc_gnusparse_00(self, next, pax_headers, buf):
-        """Process a GNU tar extended sparse header, version 0.0.
-        """
-        offsets = []
-        for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
-            offsets.append(int(match.group(1)))
-        numbytes = []
-        for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
-            numbytes.append(int(match.group(1)))
-        next.sparse = list(zip(offsets, numbytes))
-
-    def _proc_gnusparse_01(self, next, pax_headers):
-        """Process a GNU tar extended sparse header, version 0.1.
-        """
-        sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
-        next.sparse = list(zip(sparse[::2], sparse[1::2]))
-
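# Illustrative sketch, not part of the original module: a version 0.1 sparse
# map is a flat, comma-separated offset,size list, so a pax value of
# "0,512,4096,1024" becomes [(0, 512), (4096, 1024)] after the zip above.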
-    def _proc_gnusparse_10(self, next, pax_headers, tarfile):
-        """Process a GNU tar extended sparse header, version 1.0.
-        """
-        fields = None
-        sparse = []
-        buf = tarfile.fileobj.read(BLOCKSIZE)
-        fields, buf = buf.split(b"\n", 1)
-        fields = int(fields)
-        while len(sparse) < fields * 2:
-            if b"\n" not in buf:
-                buf += tarfile.fileobj.read(BLOCKSIZE)
-            number, buf = buf.split(b"\n", 1)
-            sparse.append(int(number))
-        next.offset_data = tarfile.fileobj.tell()
-        next.sparse = list(zip(sparse[::2], sparse[1::2]))
-
-    def _apply_pax_info(self, pax_headers, encoding, errors):
-        """Replace fields with supplemental information from a previous
-           pax extended or global header.
-        """
-        for keyword, value in pax_headers.items():
-            if keyword == "GNU.sparse.name":
-                setattr(self, "path", value)
-            elif keyword == "GNU.sparse.size":
-                setattr(self, "size", int(value))
-            elif keyword == "GNU.sparse.realsize":
-                setattr(self, "size", int(value))
-            elif keyword in PAX_FIELDS:
-                if keyword in PAX_NUMBER_FIELDS:
-                    try:
-                        value = PAX_NUMBER_FIELDS[keyword](value)
-                    except ValueError:
-                        value = 0
-                if keyword == "path":
-                    value = value.rstrip("/")
-                setattr(self, keyword, value)
-
-        self.pax_headers = pax_headers.copy()
-
-    def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
-        """Decode a single field from a pax record.
-        """
-        try:
-            return value.decode(encoding, "strict")
-        except UnicodeDecodeError:
-            return value.decode(fallback_encoding, fallback_errors)
-
-    def _block(self, count):
-        """Round up a byte count by BLOCKSIZE and return it,
-           e.g. _block(834) => 1024.
-        """
-        blocks, remainder = divmod(count, BLOCKSIZE)
-        if remainder:
-            blocks += 1
-        return blocks * BLOCKSIZE
-
-    def isreg(self):
-        return self.type in REGULAR_TYPES
-    def isfile(self):
-        return self.isreg()
-    def isdir(self):
-        return self.type == DIRTYPE
-    def issym(self):
-        return self.type == SYMTYPE
-    def islnk(self):
-        return self.type == LNKTYPE
-    def ischr(self):
-        return self.type == CHRTYPE
-    def isblk(self):
-        return self.type == BLKTYPE
-    def isfifo(self):
-        return self.type == FIFOTYPE
-    def issparse(self):
-        return self.sparse is not None
-    def isdev(self):
-        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
-# class TarInfo
-
-class TarFile(object):
-    """The TarFile Class provides an interface to tar archives.
-    """
-
-    debug = 0                   # May be set from 0 (no msgs) to 3 (all msgs)
-
-    dereference = False         # If true, add content of linked file to the
-                                # tar file, else the link.
-
-    ignore_zeros = False        # If true, skips empty or invalid blocks and
-                                # continues processing.
-
-    errorlevel = 1              # If 0, fatal errors only appear in debug
-                                # messages (if debug >= 0). If > 0, errors
-                                # are passed to the caller as exceptions.
-
-    format = DEFAULT_FORMAT     # The format to use when creating an archive.
-
-    encoding = ENCODING         # Encoding for 8-bit character strings.
-
-    errors = None               # Error handler for unicode conversion.
-
-    tarinfo = TarInfo           # The default TarInfo class to use.
-
-    fileobject = ExFileObject   # The default ExFileObject class to use.
-
-    def __init__(self, name=None, mode="r", fileobj=None, format=None,
-            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
-            errors="surrogateescape", pax_headers=None, debug=None, errorlevel=None):
-        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
-           read from an existing archive, 'a' to append data to an existing
-           file or 'w' to create a new file overwriting an existing one. `mode'
-           defaults to 'r'.
-           If `fileobj' is given, it is used for reading or writing data. If it
-           can be determined, `mode' is overridden by `fileobj's mode.
-           `fileobj' is not closed, when TarFile is closed.
-        """
-        if len(mode) > 1 or mode not in "raw":
-            raise ValueError("mode must be 'r', 'a' or 'w'")
-        self.mode = mode
-        self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode]
-
-        if not fileobj:
-            if self.mode == "a" and not os.path.exists(name):
-                # Create nonexistent files in append mode.
-                self.mode = "w"
-                self._mode = "wb"
-            fileobj = bltn_open(name, self._mode)
-            self._extfileobj = False
-        else:
-            if name is None and hasattr(fileobj, "name"):
-                name = fileobj.name
-            if hasattr(fileobj, "mode"):
-                self._mode = fileobj.mode
-            self._extfileobj = True
-        self.name = os.path.abspath(name) if name else None
-        self.fileobj = fileobj
-
-        # Init attributes.
-        if format is not None:
-            self.format = format
-        if tarinfo is not None:
-            self.tarinfo = tarinfo
-        if dereference is not None:
-            self.dereference = dereference
-        if ignore_zeros is not None:
-            self.ignore_zeros = ignore_zeros
-        if encoding is not None:
-            self.encoding = encoding
-        self.errors = errors
-
-        if pax_headers is not None and self.format == PAX_FORMAT:
-            self.pax_headers = pax_headers
-        else:
-            self.pax_headers = {}
-
-        if debug is not None:
-            self.debug = debug
-        if errorlevel is not None:
-            self.errorlevel = errorlevel
-
-        # Init datastructures.
-        self.closed = False
-        self.members = []       # list of members as TarInfo objects
-        self._loaded = False    # flag if all members have been read
-        self.offset = self.fileobj.tell()
-                                # current position in the archive file
-        self.inodes = {}        # dictionary caching the inodes of
-                                # archive members already added
-
-        try:
-            if self.mode == "r":
-                self.firstmember = None
-                self.firstmember = self.next()
-
-            if self.mode == "a":
-                # Move to the end of the archive,
-                # before the first empty block.
-                while True:
-                    self.fileobj.seek(self.offset)
-                    try:
-                        tarinfo = self.tarinfo.fromtarfile(self)
-                        self.members.append(tarinfo)
-                    except EOFHeaderError:
-                        self.fileobj.seek(self.offset)
-                        break
-                    except HeaderError as e:
-                        raise ReadError(str(e))
-
-            if self.mode in "aw":
-                self._loaded = True
-
-                if self.pax_headers:
-                    buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
-                    self.fileobj.write(buf)
-                    self.offset += len(buf)
-        except:
-            if not self._extfileobj:
-                self.fileobj.close()
-            self.closed = True
-            raise
-
-    #--------------------------------------------------------------------------
-    # Below are the classmethods which act as alternate constructors to the
-    # TarFile class. The open() method is the only one that is needed for
-    # public use; it is the "super"-constructor and is able to select an
-    # adequate "sub"-constructor for a particular compression using the mapping
-    # from OPEN_METH.
-    #
-    # This concept allows one to subclass TarFile without losing the comfort of
-    # the super-constructor. A sub-constructor is registered and made available
-    # by adding it to the mapping in OPEN_METH (a minimal sketch of such a
-    # subclass follows the class definition below).
-
-    @classmethod
-    def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
-        """Open a tar archive for reading, writing or appending. Return
-           an appropriate TarFile class.
-
-           mode:
-           'r' or 'r:*' open for reading with transparent compression
-           'r:'         open for reading exclusively uncompressed
-           'r:gz'       open for reading with gzip compression
-           'r:bz2'      open for reading with bzip2 compression
-           'a' or 'a:'  open for appending, creating the file if necessary
-           'w' or 'w:'  open for writing without compression
-           'w:gz'       open for writing with gzip compression
-           'w:bz2'      open for writing with bzip2 compression
-
-           'r|*'        open a stream of tar blocks with transparent compression
-           'r|'         open an uncompressed stream of tar blocks for reading
-           'r|gz'       open a gzip compressed stream of tar blocks
-           'r|bz2'      open a bzip2 compressed stream of tar blocks
-           'w|'         open an uncompressed stream for writing
-           'w|gz'       open a gzip compressed stream for writing
-           'w|bz2'      open a bzip2 compressed stream for writing
-        """
-
-        if not name and not fileobj:
-            raise ValueError("nothing to open")
-
-        if mode in ("r", "r:*"):
-            # Find out which *open() is appropriate for opening the file.
-            for comptype in cls.OPEN_METH:
-                func = getattr(cls, cls.OPEN_METH[comptype])
-                if fileobj is not None:
-                    saved_pos = fileobj.tell()
-                try:
-                    return func(name, "r", fileobj, **kwargs)
-                except (ReadError, CompressionError) as e:
-                    if fileobj is not None:
-                        fileobj.seek(saved_pos)
-                    continue
-            raise ReadError("file could not be opened successfully")
-
-        elif ":" in mode:
-            filemode, comptype = mode.split(":", 1)
-            filemode = filemode or "r"
-            comptype = comptype or "tar"
-
-            # Select the *open() function according to
-            # given compression.
-            if comptype in cls.OPEN_METH:
-                func = getattr(cls, cls.OPEN_METH[comptype])
-            else:
-                raise CompressionError("unknown compression type %r" % comptype)
-            return func(name, filemode, fileobj, **kwargs)
-
-        elif "|" in mode:
-            filemode, comptype = mode.split("|", 1)
-            filemode = filemode or "r"
-            comptype = comptype or "tar"
-
-            if filemode not in "rw":
-                raise ValueError("mode must be 'r' or 'w'")
-
-            stream = _Stream(name, filemode, comptype, fileobj, bufsize)
-            try:
-                t = cls(name, filemode, stream, **kwargs)
-            except:
-                stream.close()
-                raise
-            t._extfileobj = False
-            return t
-
-        elif mode in "aw":
-            return cls.taropen(name, mode, fileobj, **kwargs)
-
-        raise ValueError("undiscernible mode")
-
-    @classmethod
-    def taropen(cls, name, mode="r", fileobj=None, **kwargs):
-        """Open uncompressed tar archive name for reading or writing.
-        """
-        if len(mode) > 1 or mode not in "raw":
-            raise ValueError("mode must be 'r', 'a' or 'w'")
-        return cls(name, mode, fileobj, **kwargs)
-
-    @classmethod
-    def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
-        """Open gzip compressed tar archive name for reading or writing.
-           Appending is not allowed.
-        """
-        if len(mode) > 1 or mode not in "rw":
-            raise ValueError("mode must be 'r' or 'w'")
-
-        try:
-            import gzip
-            gzip.GzipFile
-        except (ImportError, AttributeError):
-            raise CompressionError("gzip module is not available")
-
-        extfileobj = fileobj is not None
-        try:
-            fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj)
-            t = cls.taropen(name, mode, fileobj, **kwargs)
-        except IOError:
-            if not extfileobj and fileobj is not None:
-                fileobj.close()
-            if fileobj is None:
-                raise
-            raise ReadError("not a gzip file")
-        except:
-            if not extfileobj and fileobj is not None:
-                fileobj.close()
-            raise
-        t._extfileobj = extfileobj
-        return t
-
-    @classmethod
-    def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
-        """Open bzip2 compressed tar archive name for reading or writing.
-           Appending is not allowed.
-        """
-        if len(mode) > 1 or mode not in "rw":
-            raise ValueError("mode must be 'r' or 'w'.")
-
-        try:
-            import bz2
-        except ImportError:
-            raise CompressionError("bz2 module is not available")
-
-        if fileobj is not None:
-            fileobj = _BZ2Proxy(fileobj, mode)
-        else:
-            fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel)
-
-        try:
-            t = cls.taropen(name, mode, fileobj, **kwargs)
-        except (IOError, EOFError):
-            fileobj.close()
-            raise ReadError("not a bzip2 file")
-        t._extfileobj = False
-        return t
-
-    # All *open() methods are registered here.
-    OPEN_METH = {
-        "tar": "taropen",   # uncompressed tar
-        "gz":  "gzopen",    # gzip compressed tar
-        "bz2": "bz2open"    # bzip2 compressed tar
-    }
-
-    #--------------------------------------------------------------------------
-    # The public methods which TarFile provides:
-
-    def close(self):
-        """Close the TarFile. In write-mode, two finishing zero blocks are
-           appended to the archive.
-        """
-        if self.closed:
-            return
-
-        if self.mode in "aw":
-            self.fileobj.write(NUL * (BLOCKSIZE * 2))
-            self.offset += (BLOCKSIZE * 2)
-            # fill up the end with zero-blocks
-            # (like option -b20 for tar does)
-            blocks, remainder = divmod(self.offset, RECORDSIZE)
-            if remainder > 0:
-                self.fileobj.write(NUL * (RECORDSIZE - remainder))
-
-        if not self._extfileobj:
-            self.fileobj.close()
-        self.closed = True
-
-    def getmember(self, name):
-        """Return a TarInfo object for member `name'. If `name' can not be
-           found in the archive, KeyError is raised. If a member occurs more
-           than once in the archive, its last occurrence is assumed to be the
-           most up-to-date version.
-        """
-        tarinfo = self._getmember(name)
-        if tarinfo is None:
-            raise KeyError("filename %r not found" % name)
-        return tarinfo
-
-    def getmembers(self):
-        """Return the members of the archive as a list of TarInfo objects. The
-           list has the same order as the members in the archive.
-        """
-        self._check()
-        if not self._loaded:    # if we want to obtain a list of
-            self._load()        # all members, we first have to
-                                # scan the whole archive.
-        return self.members
-
-    def getnames(self):
-        """Return the members of the archive as a list of their names. It has
-           the same order as the list returned by getmembers().
-        """
-        return [tarinfo.name for tarinfo in self.getmembers()]
-
-    def gettarinfo(self, name=None, arcname=None, fileobj=None):
-        """Create a TarInfo object for either the file `name' or the file
-           object `fileobj' (using os.fstat on its file descriptor). You can
-           modify some of the TarInfo's attributes before you add it using
-           addfile(). If given, `arcname' specifies an alternative name for the
-           file in the archive.
-        """
-        self._check("aw")
-
-        # When fileobj is given, replace name by
-        # fileobj's real name.
-        if fileobj is not None:
-            name = fileobj.name
-
-        # Building the name of the member in the archive.
-        # Backslashes are converted to forward slashes and
-        # absolute paths are turned into relative paths.
-        if arcname is None:
-            arcname = name
-        drv, arcname = os.path.splitdrive(arcname)
-        arcname = arcname.replace(os.sep, "/")
-        arcname = arcname.lstrip("/")
-
-        # Now, fill the TarInfo object with
-        # information specific for the file.
-        tarinfo = self.tarinfo()
-        tarinfo.tarfile = self
-
-        # Use os.stat or os.lstat, depending on platform
-        # and if symlinks shall be resolved.
-        if fileobj is None:
-            if hasattr(os, "lstat") and not self.dereference:
-                statres = os.lstat(name)
-            else:
-                statres = os.stat(name)
-        else:
-            statres = os.fstat(fileobj.fileno())
-        linkname = ""
-
-        stmd = statres.st_mode
-        if stat.S_ISREG(stmd):
-            inode = (statres.st_ino, statres.st_dev)
-            if not self.dereference and statres.st_nlink > 1 and \
-                    inode in self.inodes and arcname != self.inodes[inode]:
-                # Is it a hardlink to an already
-                # archived file?
-                type = LNKTYPE
-                linkname = self.inodes[inode]
-            else:
-                # The inode is added only if it is valid.
-                # For win32 it is always 0.
-                type = REGTYPE
-                if inode[0]:
-                    self.inodes[inode] = arcname
-        elif stat.S_ISDIR(stmd):
-            type = DIRTYPE
-        elif stat.S_ISFIFO(stmd):
-            type = FIFOTYPE
-        elif stat.S_ISLNK(stmd):
-            type = SYMTYPE
-            linkname = os.readlink(name)
-        elif stat.S_ISCHR(stmd):
-            type = CHRTYPE
-        elif stat.S_ISBLK(stmd):
-            type = BLKTYPE
-        else:
-            return None
-
-        # Fill the TarInfo object with all
-        # information we can get.
-        tarinfo.name = arcname
-        tarinfo.mode = stmd
-        tarinfo.uid = statres.st_uid
-        tarinfo.gid = statres.st_gid
-        if type == REGTYPE:
-            tarinfo.size = statres.st_size
-        else:
-            tarinfo.size = 0
-        tarinfo.mtime = statres.st_mtime
-        tarinfo.type = type
-        tarinfo.linkname = linkname
-        if pwd:
-            try:
-                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
-            except KeyError:
-                pass
-        if grp:
-            try:
-                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
-            except KeyError:
-                pass
-
-        if type in (CHRTYPE, BLKTYPE):
-            if hasattr(os, "major") and hasattr(os, "minor"):
-                tarinfo.devmajor = os.major(statres.st_rdev)
-                tarinfo.devminor = os.minor(statres.st_rdev)
-        return tarinfo
-
-    def list(self, verbose=True):
-        """Print a table of contents to sys.stdout. If `verbose' is False, only
-           the names of the members are printed. If it is True, an `ls -l'-like
-           output is produced.
-        """
-        self._check()
-
-        for tarinfo in self:
-            if verbose:
-                print(filemode(tarinfo.mode), end=' ')
-                print("%s/%s" % (tarinfo.uname or tarinfo.uid,
-                                 tarinfo.gname or tarinfo.gid), end=' ')
-                if tarinfo.ischr() or tarinfo.isblk():
-                    print("%10s" % ("%d,%d" \
-                                    % (tarinfo.devmajor, tarinfo.devminor)), end=' ')
-                else:
-                    print("%10d" % tarinfo.size, end=' ')
-                print("%d-%02d-%02d %02d:%02d:%02d" \
-                      % time.localtime(tarinfo.mtime)[:6], end=' ')
-
-            print(tarinfo.name + ("/" if tarinfo.isdir() else ""), end=' ')
-
-            if verbose:
-                if tarinfo.issym():
-                    print("->", tarinfo.linkname, end=' ')
-                if tarinfo.islnk():
-                    print("link to", tarinfo.linkname, end=' ')
-            print()
-
-    def add(self, name, arcname=None, recursive=True, exclude=None, filter=None):
-        """Add the file `name' to the archive. `name' may be any type of file
-           (directory, fifo, symbolic link, etc.). If given, `arcname'
-           specifies an alternative name for the file in the archive.
-           Directories are added recursively by default. This can be avoided by
-           setting `recursive' to False. `exclude' is a function that should
-           return True for each filename to be excluded. `filter' is a function
-           that expects a TarInfo object argument and returns the changed
-           TarInfo object, if it returns None the TarInfo object will be
-           excluded from the archive.
-        """
-        self._check("aw")
-
-        if arcname is None:
-            arcname = name
-
-        # Exclude pathnames.
-        if exclude is not None:
-            import warnings
-            warnings.warn("use the filter argument instead",
-                    DeprecationWarning, 2)
-            if exclude(name):
-                self._dbg(2, "tarfile: Excluded %r" % name)
-                return
-
-        # Skip if somebody tries to archive the archive...
-        if self.name is not None and os.path.abspath(name) == self.name:
-            self._dbg(2, "tarfile: Skipped %r" % name)
-            return
-
-        self._dbg(1, name)
-
-        # Create a TarInfo object from the file.
-        tarinfo = self.gettarinfo(name, arcname)
-
-        if tarinfo is None:
-            self._dbg(1, "tarfile: Unsupported type %r" % name)
-            return
-
-        # Change or exclude the TarInfo object.
-        if filter is not None:
-            tarinfo = filter(tarinfo)
-            if tarinfo is None:
-                self._dbg(2, "tarfile: Excluded %r" % name)
-                return
-
-        # Append the tar header and data to the archive.
-        if tarinfo.isreg():
-            f = bltn_open(name, "rb")
-            self.addfile(tarinfo, f)
-            f.close()
-
-        elif tarinfo.isdir():
-            self.addfile(tarinfo)
-            if recursive:
-                for f in os.listdir(name):
-                    self.add(os.path.join(name, f), os.path.join(arcname, f),
-                            recursive, exclude, filter=filter)
-
-        else:
-            self.addfile(tarinfo)
-
-    def addfile(self, tarinfo, fileobj=None):
-        """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is
-           given, tarinfo.size bytes are read from it and added to the archive.
-           You can create TarInfo objects using gettarinfo().
-           On Windows platforms, `fileobj' should always be opened with mode
-           'rb' to avoid irritation about the file size.
-        """
-        self._check("aw")
-
-        tarinfo = copy.copy(tarinfo)
-
-        buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
-        self.fileobj.write(buf)
-        self.offset += len(buf)
-
-        # If there's data to follow, append it.
-        if fileobj is not None:
-            copyfileobj(fileobj, self.fileobj, tarinfo.size)
-            blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
-            if remainder > 0:
-                self.fileobj.write(NUL * (BLOCKSIZE - remainder))
-                blocks += 1
-            self.offset += blocks * BLOCKSIZE
-
-        self.members.append(tarinfo)
-
-    def extractall(self, path=".", members=None):
-        """Extract all members from the archive to the current working
-           directory and set owner, modification time and permissions on
-           directories afterwards. `path' specifies a different directory
-           to extract to. `members' is optional and must be a subset of the
-           list returned by getmembers().
-        """
-        directories = []
-
-        if members is None:
-            members = self
-
-        for tarinfo in members:
-            if tarinfo.isdir():
-                # Extract directories with a safe mode.
-                directories.append(tarinfo)
-                tarinfo = copy.copy(tarinfo)
-                tarinfo.mode = 0o700
-            # Do not set_attrs directories, as we will do that further down
-            self.extract(tarinfo, path, set_attrs=not tarinfo.isdir())
-
-        # Reverse sort directories.
-        directories.sort(key=lambda a: a.name)
-        directories.reverse()
-
-        # Set correct owner, mtime and filemode on directories.
-        for tarinfo in directories:
-            dirpath = os.path.join(path, tarinfo.name)
-            try:
-                self.chown(tarinfo, dirpath)
-                self.utime(tarinfo, dirpath)
-                self.chmod(tarinfo, dirpath)
-            except ExtractError as e:
-                if self.errorlevel > 1:
-                    raise
-                else:
-                    self._dbg(1, "tarfile: %s" % e)
-
-    def extract(self, member, path="", set_attrs=True):
-        """Extract a member from the archive to the current working directory,
-           using its full name. Its file information is extracted as accurately
-           as possible. `member' may be a filename or a TarInfo object. You can
-           specify a different directory using `path'. File attributes (owner,
-           mtime, mode) are set unless `set_attrs' is False.
-        """
-        self._check("r")
-
-        if isinstance(member, str):
-            tarinfo = self.getmember(member)
-        else:
-            tarinfo = member
-
-        # Prepare the link target for makelink().
-        if tarinfo.islnk():
-            tarinfo._link_target = os.path.join(path, tarinfo.linkname)
-
-        try:
-            self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
-                                 set_attrs=set_attrs)
-        except EnvironmentError as e:
-            if self.errorlevel > 0:
-                raise
-            else:
-                if e.filename is None:
-                    self._dbg(1, "tarfile: %s" % e.strerror)
-                else:
-                    self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
-        except ExtractError as e:
-            if self.errorlevel > 1:
-                raise
-            else:
-                self._dbg(1, "tarfile: %s" % e)
-
-    def extractfile(self, member):
-        """Extract a member from the archive as a file object. `member' may be
-           a filename or a TarInfo object. If `member' is a regular file, a
-           file-like object is returned. If `member' is a link, a file-like
-           object is constructed from the link's target. If `member' is none of
-           the above, None is returned.
-           The file-like object is read-only and provides the following
-           methods: read(), readline(), readlines(), seek() and tell()
-        """
-        self._check("r")
-
-        if isinstance(member, str):
-            tarinfo = self.getmember(member)
-        else:
-            tarinfo = member
-
-        if tarinfo.isreg():
-            return self.fileobject(self, tarinfo)
-
-        elif tarinfo.type not in SUPPORTED_TYPES:
-            # If a member's type is unknown, it is treated as a
-            # regular file.
-            return self.fileobject(self, tarinfo)
-
-        elif tarinfo.islnk() or tarinfo.issym():
-            if isinstance(self.fileobj, _Stream):
-                # A small but ugly workaround for the case that someone tries
-                # to extract a (sym)link as a file-object from a non-seekable
-                # stream of tar blocks.
-                raise StreamError("cannot extract (sym)link as file object")
-            else:
-                # A (sym)link's file object is its target's file object.
-                return self.extractfile(self._find_link_target(tarinfo))
-        else:
-            # If there's no data associated with the member (directory, chrdev,
-            # blkdev, etc.), return None instead of a file object.
-            return None
-
-    def _extract_member(self, tarinfo, targetpath, set_attrs=True):
-        """Extract the TarInfo object tarinfo to a physical
-           file called targetpath.
-        """
-        # Fetch the TarInfo object for the given name
-        # and build the destination pathname, replacing
-        # forward slashes to platform specific separators.
-        targetpath = targetpath.rstrip("/")
-        targetpath = targetpath.replace("/", os.sep)
-
-        # Create all upper directories.
-        upperdirs = os.path.dirname(targetpath)
-        if upperdirs and not os.path.exists(upperdirs):
-            # Create directories that are not part of the archive with
-            # default permissions.
-            os.makedirs(upperdirs)
-
-        if tarinfo.islnk() or tarinfo.issym():
-            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
-        else:
-            self._dbg(1, tarinfo.name)
-
-        if tarinfo.isreg():
-            self.makefile(tarinfo, targetpath)
-        elif tarinfo.isdir():
-            self.makedir(tarinfo, targetpath)
-        elif tarinfo.isfifo():
-            self.makefifo(tarinfo, targetpath)
-        elif tarinfo.ischr() or tarinfo.isblk():
-            self.makedev(tarinfo, targetpath)
-        elif tarinfo.islnk() or tarinfo.issym():
-            self.makelink(tarinfo, targetpath)
-        elif tarinfo.type not in SUPPORTED_TYPES:
-            self.makeunknown(tarinfo, targetpath)
-        else:
-            self.makefile(tarinfo, targetpath)
-
-        if set_attrs:
-            self.chown(tarinfo, targetpath)
-            if not tarinfo.issym():
-                self.chmod(tarinfo, targetpath)
-                self.utime(tarinfo, targetpath)
-
-    #--------------------------------------------------------------------------
-    # Below are the different file methods. They are called via
-    # _extract_member() when extract() is called. They can be replaced in a
-    # subclass to implement other functionality.
-
-    def makedir(self, tarinfo, targetpath):
-        """Make a directory called targetpath.
-        """
-        try:
-            # Use a safe mode for the directory, the real mode is set
-            # later in _extract_member().
-            os.mkdir(targetpath, 0o700)
-        except EnvironmentError as e:
-            if e.errno != errno.EEXIST:
-                raise
-
-    def makefile(self, tarinfo, targetpath):
-        """Make a file called targetpath.
-        """
-        source = self.fileobj
-        source.seek(tarinfo.offset_data)
-        target = bltn_open(targetpath, "wb")
-        if tarinfo.sparse is not None:
-            for offset, size in tarinfo.sparse:
-                target.seek(offset)
-                copyfileobj(source, target, size)
-        else:
-            copyfileobj(source, target, tarinfo.size)
-        target.seek(tarinfo.size)
-        target.truncate()
-        target.close()
-
-    def makeunknown(self, tarinfo, targetpath):
-        """Make a file from a TarInfo object with an unknown type
-           at targetpath.
-        """
-        self.makefile(tarinfo, targetpath)
-        self._dbg(1, "tarfile: Unknown file type %r, " \
-                     "extracted as regular file." % tarinfo.type)
-
-    def makefifo(self, tarinfo, targetpath):
-        """Make a fifo called targetpath.
-        """
-        if hasattr(os, "mkfifo"):
-            os.mkfifo(targetpath)
-        else:
-            raise ExtractError("fifo not supported by system")
-
-    def makedev(self, tarinfo, targetpath):
-        """Make a character or block device called targetpath.
-        """
-        if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
-            raise ExtractError("special devices not supported by system")
-
-        mode = tarinfo.mode
-        if tarinfo.isblk():
-            mode |= stat.S_IFBLK
-        else:
-            mode |= stat.S_IFCHR
-
-        os.mknod(targetpath, mode,
-                 os.makedev(tarinfo.devmajor, tarinfo.devminor))
-
-    def makelink(self, tarinfo, targetpath):
-        """Make a (symbolic) link called targetpath. If it cannot be created
-          (platform limitation), we try to make a copy of the referenced file
-          instead of a link.
-        """
-        try:
-            # For systems that support symbolic and hard links.
-            if tarinfo.issym():
-                os.symlink(tarinfo.linkname, targetpath)
-            else:
-                # See extract().
-                if os.path.exists(tarinfo._link_target):
-                    os.link(tarinfo._link_target, targetpath)
-                else:
-                    self._extract_member(self._find_link_target(tarinfo),
-                                         targetpath)
-        except symlink_exception:
-            # The platform cannot create (sym)links; fall back to extracting
-            # a copy of the link's target from the archive instead.
-            try:
-                self._extract_member(self._find_link_target(tarinfo),
-                                     targetpath)
-            except KeyError:
-                raise ExtractError("unable to resolve link inside archive")
-
-    def chown(self, tarinfo, targetpath):
-        """Set owner of targetpath according to tarinfo.
-        """
-        if pwd and hasattr(os, "geteuid") and os.geteuid() == 0:
-            # We have to be root to do so.
-            try:
-                g = grp.getgrnam(tarinfo.gname)[2]
-            except KeyError:
-                g = tarinfo.gid
-            try:
-                u = pwd.getpwnam(tarinfo.uname)[2]
-            except KeyError:
-                u = tarinfo.uid
-            try:
-                if tarinfo.issym() and hasattr(os, "lchown"):
-                    os.lchown(targetpath, u, g)
-                else:
-                    if sys.platform != "os2emx":
-                        os.chown(targetpath, u, g)
-            except EnvironmentError as e:
-                raise ExtractError("could not change owner")
-
-    def chmod(self, tarinfo, targetpath):
-        """Set file permissions of targetpath according to tarinfo.
-        """
-        if hasattr(os, 'chmod'):
-            try:
-                os.chmod(targetpath, tarinfo.mode)
-            except EnvironmentError as e:
-                raise ExtractError("could not change mode")
-
-    def utime(self, tarinfo, targetpath):
-        """Set modification time of targetpath according to tarinfo.
-        """
-        if not hasattr(os, 'utime'):
-            return
-        try:
-            os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
-        except EnvironmentError as e:
-            raise ExtractError("could not change modification time")
-
-    #--------------------------------------------------------------------------
-    def next(self):
-        """Return the next member of the archive as a TarInfo object, when
-           TarFile is opened for reading. Return None if there is no more
-           available.
-        """
-        self._check("ra")
-        if self.firstmember is not None:
-            m = self.firstmember
-            self.firstmember = None
-            return m
-
-        # Read the next block.
-        self.fileobj.seek(self.offset)
-        tarinfo = None
-        while True:
-            try:
-                tarinfo = self.tarinfo.fromtarfile(self)
-            except EOFHeaderError as e:
-                if self.ignore_zeros:
-                    self._dbg(2, "0x%X: %s" % (self.offset, e))
-                    self.offset += BLOCKSIZE
-                    continue
-            except InvalidHeaderError as e:
-                if self.ignore_zeros:
-                    self._dbg(2, "0x%X: %s" % (self.offset, e))
-                    self.offset += BLOCKSIZE
-                    continue
-                elif self.offset == 0:
-                    raise ReadError(str(e))
-            except EmptyHeaderError:
-                if self.offset == 0:
-                    raise ReadError("empty file")
-            except TruncatedHeaderError as e:
-                if self.offset == 0:
-                    raise ReadError(str(e))
-            except SubsequentHeaderError as e:
-                raise ReadError(str(e))
-            break
-
-        if tarinfo is not None:
-            self.members.append(tarinfo)
-        else:
-            self._loaded = True
-
-        return tarinfo
-
-    #--------------------------------------------------------------------------
-    # Little helper methods:
-
-    def _getmember(self, name, tarinfo=None, normalize=False):
-        """Find an archive member by name from bottom to top.
-           If tarinfo is given, it is used as the starting point.
-        """
-        # Ensure that all members have been loaded.
-        members = self.getmembers()
-
-        # Limit the member search list up to tarinfo.
-        if tarinfo is not None:
-            members = members[:members.index(tarinfo)]
-
-        if normalize:
-            name = os.path.normpath(name)
-
-        for member in reversed(members):
-            if normalize:
-                member_name = os.path.normpath(member.name)
-            else:
-                member_name = member.name
-
-            if name == member_name:
-                return member
-
-    def _load(self):
-        """Read through the entire archive file and look for readable
-           members.
-        """
-        while True:
-            tarinfo = self.next()
-            if tarinfo is None:
-                break
-        self._loaded = True
-
-    def _check(self, mode=None):
-        """Check if TarFile is still open, and if the operation's mode
-           corresponds to TarFile's mode.
-        """
-        if self.closed:
-            raise IOError("%s is closed" % self.__class__.__name__)
-        if mode is not None and self.mode not in mode:
-            raise IOError("bad operation for mode %r" % self.mode)
-
-    def _find_link_target(self, tarinfo):
-        """Find the target member of a symlink or hardlink member in the
-           archive.
-        """
-        if tarinfo.issym():
-            # Always search the entire archive.
-            linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname
-            limit = None
-        else:
-            # Search the archive before the link, because a hard link is
-            # just a reference to an already archived file.
-            linkname = tarinfo.linkname
-            limit = tarinfo
-
-        member = self._getmember(linkname, tarinfo=limit, normalize=True)
-        if member is None:
-            raise KeyError("linkname %r not found" % linkname)
-        return member
-
-    def __iter__(self):
-        """Provide an iterator object.
-        """
-        if self._loaded:
-            return iter(self.members)
-        else:
-            return TarIter(self)
-
-    def _dbg(self, level, msg):
-        """Write debugging output to sys.stderr.
-        """
-        if level <= self.debug:
-            print(msg, file=sys.stderr)
-
-    def __enter__(self):
-        self._check()
-        return self
-
-    def __exit__(self, type, value, traceback):
-        if type is None:
-            self.close()
-        else:
-            # An exception occurred. We must not call close() because
-            # it would try to write end-of-archive blocks and padding.
-            if not self._extfileobj:
-                self.fileobj.close()
-            self.closed = True
-# class TarFile
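-
-# A minimal sketch of the OPEN_METH extension point described inside TarFile:
-# a hypothetical subclass that registers an additional sub-constructor.  The
-# name XzTarFile and the lzma-based wrapper are illustrative assumptions, not
-# functionality provided by this module.
-class XzTarFile(TarFile):
-
-    @classmethod
-    def xzopen(cls, name, mode="r", fileobj=None, **kwargs):
-        """Open an lzma/xz compressed tar archive for reading or writing."""
-        if len(mode) > 1 or mode not in "rw":
-            raise ValueError("mode must be 'r' or 'w'")
-        try:
-            import lzma
-        except ImportError:
-            raise CompressionError("lzma module is not available")
-        fileobj = lzma.LZMAFile(fileobj or name, mode)
-        try:
-            t = cls.taropen(name, mode, fileobj, **kwargs)
-        except (lzma.LZMAError, EOFError):
-            fileobj.close()
-            raise ReadError("not an lzma file")
-        t._extfileobj = False
-        return t
-
-    # Register the new sub-constructor alongside the inherited ones.
-    OPEN_METH = dict(TarFile.OPEN_METH, xz="xzopen")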
-
-class TarIter(object):
-    """Iterator Class.
-
-       for tarinfo in TarFile(...):
-           suite...
-    """
-
-    def __init__(self, tarfile):
-        """Construct a TarIter object.
-        """
-        self.tarfile = tarfile
-        self.index = 0
-    def __iter__(self):
-        """Return iterator object.
-        """
-        return self
-
-    def __next__(self):
-        """Return the next item using TarFile's next() method.
-           When all members have been read, set TarFile as _loaded.
-        """
-        # Fix for SF #1100429: Under rare circumstances it can
-        # happen that getmembers() is called during iteration,
-        # which will cause TarIter to stop prematurely.
-        if not self.tarfile._loaded:
-            tarinfo = self.tarfile.next()
-            if not tarinfo:
-                self.tarfile._loaded = True
-                raise StopIteration
-        else:
-            try:
-                tarinfo = self.tarfile.members[self.index]
-            except IndexError:
-                raise StopIteration
-        self.index += 1
-        return tarinfo
-
-    next = __next__ # for Python 2.x
-
-#--------------------
-# exported functions
-#--------------------
-def is_tarfile(name):
-    """Return True if name points to a tar archive that we
-       are able to handle, else return False.
-    """
-    try:
-        t = open(name)
-        t.close()
-        return True
-    except TarError:
-        return False
-
-bltn_open = open
-open = TarFile.open
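-
-#--------------------
-# usage sketch
-#--------------------
-# A minimal usage sketch of the module-level API above (is_tarfile, open,
-# extractall).  The archive name "example.tar.gz" and the extraction
-# directory "out" are placeholders used only for illustration.
-if __name__ == "__main__":
-    if is_tarfile("example.tar.gz"):
-        tf = open("example.tar.gz", "r:*")   # transparent decompression
-        try:
-            print(tf.getnames())             # member names in archive order
-            tf.extractall(path="out")        # directory metadata is set last
-        finally:
-            tf.close()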
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/compat.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/compat.py
deleted file mode 100644
index ff328c8..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/compat.py
+++ /dev/null
@@ -1,1120 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2013-2017 Vinay Sajip.
-# Licensed to the Python Software Foundation under a contributor agreement.
-# See LICENSE.txt and CONTRIBUTORS.txt.
-#
-from __future__ import absolute_import
-
-import os
-import re
-import sys
-
-try:
-    import ssl
-except ImportError:  # pragma: no cover
-    ssl = None
-
-if sys.version_info[0] < 3:  # pragma: no cover
-    from StringIO import StringIO
-    string_types = basestring,
-    text_type = unicode
-    from types import FileType as file_type
-    import __builtin__ as builtins
-    import ConfigParser as configparser
-    from ._backport import shutil
-    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit
-    from urllib import (urlretrieve, quote as _quote, unquote, url2pathname,
-                        pathname2url, ContentTooShortError, splittype)
-
-    def quote(s):
-        if isinstance(s, unicode):
-            s = s.encode('utf-8')
-        return _quote(s)
-
-    import urllib2
-    from urllib2 import (Request, urlopen, URLError, HTTPError,
-                         HTTPBasicAuthHandler, HTTPPasswordMgr,
-                         HTTPHandler, HTTPRedirectHandler,
-                         build_opener)
-    if ssl:
-        from urllib2 import HTTPSHandler
-    import httplib
-    import xmlrpclib
-    import Queue as queue
-    from HTMLParser import HTMLParser
-    import htmlentitydefs
-    raw_input = raw_input
-    from itertools import ifilter as filter
-    from itertools import ifilterfalse as filterfalse
-
-    _userprog = None
-    def splituser(host):
-        """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
-        global _userprog
-        if _userprog is None:
-            import re
-            _userprog = re.compile('^(.*)@(.*)$')
-
-        match = _userprog.match(host)
-        if match: return match.group(1, 2)
-        return None, host
-
-else:  # pragma: no cover
-    from io import StringIO
-    string_types = str,
-    text_type = str
-    from io import TextIOWrapper as file_type
-    import builtins
-    import configparser
-    import shutil
-    from urllib.parse import (urlparse, urlunparse, urljoin, splituser, quote,
-                              unquote, urlsplit, urlunsplit, splittype)
-    from urllib.request import (urlopen, urlretrieve, Request, url2pathname,
-                                pathname2url,
-                                HTTPBasicAuthHandler, HTTPPasswordMgr,
-                                HTTPHandler, HTTPRedirectHandler,
-                                build_opener)
-    if ssl:
-        from urllib.request import HTTPSHandler
-    from urllib.error import HTTPError, URLError, ContentTooShortError
-    import http.client as httplib
-    import urllib.request as urllib2
-    import xmlrpc.client as xmlrpclib
-    import queue
-    from html.parser import HTMLParser
-    import html.entities as htmlentitydefs
-    raw_input = input
-    from itertools import filterfalse
-    filter = filter
-
-try:
-    from ssl import match_hostname, CertificateError
-except ImportError: # pragma: no cover
-    class CertificateError(ValueError):
-        pass
-
-
-    def _dnsname_match(dn, hostname, max_wildcards=1):
-        """Matching according to RFC 6125, section 6.4.3
-
-        http://tools.ietf.org/html/rfc6125#section-6.4.3
-        """
-        pats = []
-        if not dn:
-            return False
-
-        parts = dn.split('.')
-        leftmost, remainder = parts[0], parts[1:]
-
-        wildcards = leftmost.count('*')
-        if wildcards > max_wildcards:
-            # Issue #17980: avoid denials of service by refusing more
-            # than one wildcard per fragment.  A survey of established
-            # policy among SSL implementations showed it to be a
-            # reasonable choice.
-            raise CertificateError(
-                "too many wildcards in certificate DNS name: " + repr(dn))
-
-        # speed up common case w/o wildcards
-        if not wildcards:
-            return dn.lower() == hostname.lower()
-
-        # RFC 6125, section 6.4.3, subitem 1.
-        # The client SHOULD NOT attempt to match a presented identifier in which
-        # the wildcard character comprises a label other than the left-most label.
-        if leftmost == '*':
-            # When '*' is a fragment by itself, it matches a non-empty dotless
-            # fragment.
-            pats.append('[^.]+')
-        elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
-            # RFC 6125, section 6.4.3, subitem 3.
-            # The client SHOULD NOT attempt to match a presented identifier
-            # where the wildcard character is embedded within an A-label or
-            # U-label of an internationalized domain name.
-            pats.append(re.escape(leftmost))
-        else:
-            # Otherwise, '*' matches any dotless string, e.g. www*
-            pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
-
-        # add the remaining fragments, ignore any wildcards
-        for frag in remainder:
-            pats.append(re.escape(frag))
-
-        pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
-        return pat.match(hostname)
-
-
-    def match_hostname(cert, hostname):
-        """Verify that *cert* (in decoded format as returned by
-        SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
-        rules are followed, but IP addresses are not accepted for *hostname*.
-
-        CertificateError is raised on failure. On success, the function
-        returns nothing.
-        """
-        if not cert:
-            raise ValueError("empty or no certificate, match_hostname needs a "
-                             "SSL socket or SSL context with either "
-                             "CERT_OPTIONAL or CERT_REQUIRED")
-        dnsnames = []
-        san = cert.get('subjectAltName', ())
-        for key, value in san:
-            if key == 'DNS':
-                if _dnsname_match(value, hostname):
-                    return
-                dnsnames.append(value)
-        if not dnsnames:
-            # The subject is only checked when there is no dNSName entry
-            # in subjectAltName
-            for sub in cert.get('subject', ()):
-                for key, value in sub:
-                    # XXX according to RFC 2818, the most specific Common Name
-                    # must be used.
-                    if key == 'commonName':
-                        if _dnsname_match(value, hostname):
-                            return
-                        dnsnames.append(value)
-        if len(dnsnames) > 1:
-            raise CertificateError("hostname %r "
-                "doesn't match either of %s"
-                % (hostname, ', '.join(map(repr, dnsnames))))
-        elif len(dnsnames) == 1:
-            raise CertificateError("hostname %r "
-                "doesn't match %r"
-                % (hostname, dnsnames[0]))
-        else:
-            raise CertificateError("no appropriate commonName or "
-                "subjectAltName fields were found")
-
-
-try:
-    from types import SimpleNamespace as Container
-except ImportError:  # pragma: no cover
-    class Container(object):
-        """
-        A generic container for when multiple values need to be returned
-        """
-        def __init__(self, **kwargs):
-            self.__dict__.update(kwargs)
-
-
-try:
-    from shutil import which
-except ImportError:  # pragma: no cover
-    # Implementation from Python 3.3
-    def which(cmd, mode=os.F_OK | os.X_OK, path=None):
-        """Given a command, mode, and a PATH string, return the path which
-        conforms to the given mode on the PATH, or None if there is no such
-        file.
-
-        `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
-        of os.environ.get("PATH"), or can be overridden with a custom search
-        path.
-
-        """
-        # Check that a given file can be accessed with the correct mode.
-        # Additionally check that `file` is not a directory, as on Windows
-        # directories pass the os.access check.
-        def _access_check(fn, mode):
-            return (os.path.exists(fn) and os.access(fn, mode)
-                    and not os.path.isdir(fn))
-
-        # If we're given a path with a directory part, look it up directly rather
-        # than referring to PATH directories. This includes checking relative to the
-        # current directory, e.g. ./script
-        if os.path.dirname(cmd):
-            if _access_check(cmd, mode):
-                return cmd
-            return None
-
-        if path is None:
-            path = os.environ.get("PATH", os.defpath)
-        if not path:
-            return None
-        path = path.split(os.pathsep)
-
-        if sys.platform == "win32":
-            # The current directory takes precedence on Windows.
-            if not os.curdir in path:
-                path.insert(0, os.curdir)
-
-            # PATHEXT is necessary to check on Windows.
-            pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
-            # See if the given file matches any of the expected path extensions.
-            # This will allow us to short circuit when given "python.exe".
-            # If it does match, only test that one, otherwise we have to try
-            # others.
-            if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
-                files = [cmd]
-            else:
-                files = [cmd + ext for ext in pathext]
-        else:
-            # On other platforms you don't have things like PATHEXT to tell you
-            # what file suffixes are executable, so just pass on cmd as-is.
-            files = [cmd]
-
-        seen = set()
-        for dir in path:
-            normdir = os.path.normcase(dir)
-            if not normdir in seen:
-                seen.add(normdir)
-                for thefile in files:
-                    name = os.path.join(dir, thefile)
-                    if _access_check(name, mode):
-                        return name
-        return None
-
-
-# ZipFile is a context manager in 2.7, but not in 2.6
-
-from zipfile import ZipFile as BaseZipFile
-
-if hasattr(BaseZipFile, '__enter__'):  # pragma: no cover
-    ZipFile = BaseZipFile
-else:  # pragma: no cover
-    from zipfile import ZipExtFile as BaseZipExtFile
-
-    class ZipExtFile(BaseZipExtFile):
-        def __init__(self, base):
-            self.__dict__.update(base.__dict__)
-
-        def __enter__(self):
-            return self
-
-        def __exit__(self, *exc_info):
-            self.close()
-            # return None, so if an exception occurred, it will propagate
-
-    class ZipFile(BaseZipFile):
-        def __enter__(self):
-            return self
-
-        def __exit__(self, *exc_info):
-            self.close()
-            # return None, so if an exception occurred, it will propagate
-
-        def open(self, *args, **kwargs):
-            base = BaseZipFile.open(self, *args, **kwargs)
-            return ZipExtFile(base)
-
-try:
-    from platform import python_implementation
-except ImportError: # pragma: no cover
-    def python_implementation():
-        """Return a string identifying the Python implementation."""
-        if 'PyPy' in sys.version:
-            return 'PyPy'
-        if os.name == 'java':
-            return 'Jython'
-        if sys.version.startswith('IronPython'):
-            return 'IronPython'
-        return 'CPython'
-
-try:
-    import sysconfig
-except ImportError: # pragma: no cover
-    from ._backport import sysconfig
-
-try:
-    callable = callable
-except NameError:   # pragma: no cover
-    from collections import Callable
-
-    def callable(obj):
-        return isinstance(obj, Callable)
-
-
-try:
-    fsencode = os.fsencode
-    fsdecode = os.fsdecode
-except AttributeError:  # pragma: no cover
-    # Issue #99: on some systems (e.g. containerised),
-    # sys.getfilesystemencoding() returns None, and we need a real value,
-    # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and
-    # sys.getfilesystemencoding(): the return value is "the user’s preference
-    # according to the result of nl_langinfo(CODESET), or None if the
-    # nl_langinfo(CODESET) failed."
-    _fsencoding = sys.getfilesystemencoding() or 'utf-8'
-    if _fsencoding == 'mbcs':
-        _fserrors = 'strict'
-    else:
-        _fserrors = 'surrogateescape'
-
-    def fsencode(filename):
-        if isinstance(filename, bytes):
-            return filename
-        elif isinstance(filename, text_type):
-            return filename.encode(_fsencoding, _fserrors)
-        else:
-            raise TypeError("expect bytes or str, not %s" %
-                            type(filename).__name__)
-
-    def fsdecode(filename):
-        if isinstance(filename, text_type):
-            return filename
-        elif isinstance(filename, bytes):
-            return filename.decode(_fsencoding, _fserrors)
-        else:
-            raise TypeError("expect bytes or str, not %s" %
-                            type(filename).__name__)
-
-try:
-    from tokenize import detect_encoding
-except ImportError: # pragma: no cover
-    from codecs import BOM_UTF8, lookup
-    import re
-
-    cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)")
-
-    def _get_normal_name(orig_enc):
-        """Imitates get_normal_name in tokenizer.c."""
-        # Only care about the first 12 characters.
-        enc = orig_enc[:12].lower().replace("_", "-")
-        if enc == "utf-8" or enc.startswith("utf-8-"):
-            return "utf-8"
-        if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
-           enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
-            return "iso-8859-1"
-        return orig_enc
-
-    def detect_encoding(readline):
-        """
-        The detect_encoding() function is used to detect the encoding that should
-        be used to decode a Python source file.  It requires one argument, readline,
-        in the same way as the tokenize() generator.
-
-        It will call readline a maximum of twice, and return the encoding used
-        (as a string) and a list of any lines (left as bytes) it has read in.
-
-        It detects the encoding from the presence of a utf-8 bom or an encoding
-        cookie as specified in pep-0263.  If both a bom and a cookie are present,
-        but disagree, a SyntaxError will be raised.  If the encoding cookie is an
-        invalid charset, raise a SyntaxError.  Note that if a utf-8 bom is found,
-        'utf-8-sig' is returned.
-
-        If no encoding is specified, then the default of 'utf-8' will be returned.
-        """
-        try:
-            filename = readline.__self__.name
-        except AttributeError:
-            filename = None
-        bom_found = False
-        encoding = None
-        default = 'utf-8'
-        def read_or_stop():
-            try:
-                return readline()
-            except StopIteration:
-                return b''
-
-        def find_cookie(line):
-            try:
-                # Decode as UTF-8. Either the line is an encoding declaration,
-                # in which case it should be pure ASCII, or it must be UTF-8
-                # per default encoding.
-                line_string = line.decode('utf-8')
-            except UnicodeDecodeError:
-                msg = "invalid or missing encoding declaration"
-                if filename is not None:
-                    msg = '{} for {!r}'.format(msg, filename)
-                raise SyntaxError(msg)
-
-            matches = cookie_re.findall(line_string)
-            if not matches:
-                return None
-            encoding = _get_normal_name(matches[0])
-            try:
-                codec = lookup(encoding)
-            except LookupError:
-                # This behaviour mimics the Python interpreter
-                if filename is None:
-                    msg = "unknown encoding: " + encoding
-                else:
-                    msg = "unknown encoding for {!r}: {}".format(filename,
-                            encoding)
-                raise SyntaxError(msg)
-
-            if bom_found:
-                if codec.name != 'utf-8':
-                    # This behaviour mimics the Python interpreter
-                    if filename is None:
-                        msg = 'encoding problem: utf-8'
-                    else:
-                        msg = 'encoding problem for {!r}: utf-8'.format(filename)
-                    raise SyntaxError(msg)
-                encoding += '-sig'
-            return encoding
-
-        first = read_or_stop()
-        if first.startswith(BOM_UTF8):
-            bom_found = True
-            first = first[3:]
-            default = 'utf-8-sig'
-        if not first:
-            return default, []
-
-        encoding = find_cookie(first)
-        if encoding:
-            return encoding, [first]
-
-        second = read_or_stop()
-        if not second:
-            return default, [first]
-
-        encoding = find_cookie(second)
-        if encoding:
-            return encoding, [first, second]
-
-        return default, [first, second]
-
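-# A minimal sketch (hypothetical helper, for illustration only): whichever
-# detect_encoding is bound above, it takes a readline callable and returns an
-# (encoding, lines_read) pair.
-def _example_detect_encoding(data=b"# -*- coding: latin-1 -*-\nx = 1\n"):
-    import io
-    # The 'latin-1' cookie is normalised to 'iso-8859-1'.
-    return detect_encoding(io.BytesIO(data).readline)[0]
-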
-# For converting & <-> &amp; etc.
-try:
-    from html import escape
-except ImportError:
-    from cgi import escape
-if sys.version_info[:2] < (3, 4):
-    unescape = HTMLParser().unescape
-else:
-    from html import unescape
-
-try:
-    from collections import ChainMap
-except ImportError: # pragma: no cover
-    from collections import MutableMapping
-
-    try:
-        from reprlib import recursive_repr as _recursive_repr
-    except ImportError:
-        def _recursive_repr(fillvalue='...'):
-            '''
-            Decorator to make a repr function return fillvalue for a recursive
-            call
-            '''
-
-            def decorating_function(user_function):
-                repr_running = set()
-
-                def wrapper(self):
-                    key = id(self), get_ident()
-                    if key in repr_running:
-                        return fillvalue
-                    repr_running.add(key)
-                    try:
-                        result = user_function(self)
-                    finally:
-                        repr_running.discard(key)
-                    return result
-
-                # Can't use functools.wraps() here because of bootstrap issues
-                wrapper.__module__ = getattr(user_function, '__module__')
-                wrapper.__doc__ = getattr(user_function, '__doc__')
-                wrapper.__name__ = getattr(user_function, '__name__')
-                wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
-                return wrapper
-
-            return decorating_function
-
-    class ChainMap(MutableMapping):
-        ''' A ChainMap groups multiple dicts (or other mappings) together
-        to create a single, updateable view.
-
-        The underlying mappings are stored in a list.  That list is public and can
-        be accessed or updated using the *maps* attribute.  There is no other state.
-
-        Lookups search the underlying mappings successively until a key is found.
-        In contrast, writes, updates, and deletions only operate on the first
-        mapping.
-
-        '''
-
-        def __init__(self, *maps):
-            '''Initialize a ChainMap by setting *maps* to the given mappings.
-            If no mappings are provided, a single empty dictionary is used.
-
-            '''
-            self.maps = list(maps) or [{}]          # always at least one map
-
-        def __missing__(self, key):
-            raise KeyError(key)
-
-        def __getitem__(self, key):
-            for mapping in self.maps:
-                try:
-                    return mapping[key]             # can't use 'key in mapping' with defaultdict
-                except KeyError:
-                    pass
-            return self.__missing__(key)            # support subclasses that define __missing__
-
-        def get(self, key, default=None):
-            return self[key] if key in self else default
-
-        def __len__(self):
-            return len(set().union(*self.maps))     # reuses stored hash values if possible
-
-        def __iter__(self):
-            return iter(set().union(*self.maps))
-
-        def __contains__(self, key):
-            return any(key in m for m in self.maps)
-
-        def __bool__(self):
-            return any(self.maps)
-
-        @_recursive_repr()
-        def __repr__(self):
-            return '{0.__class__.__name__}({1})'.format(
-                self, ', '.join(map(repr, self.maps)))
-
-        @classmethod
-        def fromkeys(cls, iterable, *args):
-            'Create a ChainMap with a single dict created from the iterable.'
-            return cls(dict.fromkeys(iterable, *args))
-
-        def copy(self):
-            'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
-            return self.__class__(self.maps[0].copy(), *self.maps[1:])
-
-        __copy__ = copy
-
-        def new_child(self):                        # like Django's Context.push()
-            'New ChainMap with a new dict followed by all previous maps.'
-            return self.__class__({}, *self.maps)
-
-        @property
-        def parents(self):                          # like Django's Context.pop()
-            'New ChainMap from maps[1:].'
-            return self.__class__(*self.maps[1:])
-
-        def __setitem__(self, key, value):
-            self.maps[0][key] = value
-
-        def __delitem__(self, key):
-            try:
-                del self.maps[0][key]
-            except KeyError:
-                raise KeyError('Key not found in the first mapping: {!r}'.format(key))
-
-        def popitem(self):
-            'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
-            try:
-                return self.maps[0].popitem()
-            except KeyError:
-                raise KeyError('No keys found in the first mapping.')
-
-        def pop(self, key, *args):
-            'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
-            try:
-                return self.maps[0].pop(key, *args)
-            except KeyError:
-                raise KeyError('Key not found in the first mapping: {!r}'.format(key))
-
-        def clear(self):
-            'Clear maps[0], leaving maps[1:] intact.'
-            self.maps[0].clear()
-
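A short usage sketch of the ChainMap semantics described in the docstring above; collections.ChainMap on Python 3.3+ behaves the same way as this backport (illustrative example only):

    from collections import ChainMap

    defaults = {'colour': 'blue', 'size': 'M'}
    overrides = {'size': 'L'}
    cm = ChainMap(overrides, defaults)

    assert cm['size'] == 'L'                 # found in the first mapping
    assert cm['colour'] == 'blue'            # falls through to the second mapping
    cm['colour'] = 'red'                     # writes only touch maps[0]
    assert defaults['colour'] == 'blue'
    assert cm.parents['colour'] == 'blue'    # view of the chain without maps[0]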
-try:
-    from importlib.util import cache_from_source  # Python >= 3.4
-except ImportError:  # pragma: no cover
-    try:
-        from imp import cache_from_source
-    except ImportError:  # pragma: no cover
-        def cache_from_source(path, debug_override=None):
-            assert path.endswith('.py')
-            if debug_override is None:
-                debug_override = __debug__
-            if debug_override:
-                suffix = 'c'
-            else:
-                suffix = 'o'
-            return path + suffix
-
-try:
-    from collections import OrderedDict
-except ImportError: # pragma: no cover
-## {{{ http://code.activestate.com/recipes/576693/ (r9)
-# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
-# Passes Python2.7's test suite and incorporates all the latest updates.
-    try:
-        from thread import get_ident as _get_ident
-    except ImportError:
-        from dummy_thread import get_ident as _get_ident
-
-    try:
-        from _abcoll import KeysView, ValuesView, ItemsView
-    except ImportError:
-        pass
-
-
-    class OrderedDict(dict):
-        'Dictionary that remembers insertion order'
-        # An inherited dict maps keys to values.
-        # The inherited dict provides __getitem__, __len__, __contains__, and get.
-        # The remaining methods are order-aware.
-        # Big-O running times for all methods are the same as for regular dictionaries.
-
-        # The internal self.__map dictionary maps keys to links in a doubly linked list.
-        # The circular doubly linked list starts and ends with a sentinel element.
-        # The sentinel element never gets deleted (this simplifies the algorithm).
-        # Each link is stored as a list of length three:  [PREV, NEXT, KEY].
-
-        def __init__(self, *args, **kwds):
-            '''Initialize an ordered dictionary.  Signature is the same as for
-            regular dictionaries, but keyword arguments are not recommended
-            because their insertion order is arbitrary.
-
-            '''
-            if len(args) > 1:
-                raise TypeError('expected at most 1 arguments, got %d' % len(args))
-            try:
-                self.__root
-            except AttributeError:
-                self.__root = root = []                     # sentinel node
-                root[:] = [root, root, None]
-                self.__map = {}
-            self.__update(*args, **kwds)
-
-        def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
-            'od.__setitem__(i, y) <==> od[i]=y'
-            # Setting a new item creates a new link which goes at the end of the linked
-            # list, and the inherited dictionary is updated with the new key/value pair.
-            if key not in self:
-                root = self.__root
-                last = root[0]
-                last[1] = root[0] = self.__map[key] = [last, root, key]
-            dict_setitem(self, key, value)
-
-        def __delitem__(self, key, dict_delitem=dict.__delitem__):
-            'od.__delitem__(y) <==> del od[y]'
-            # Deleting an existing item uses self.__map to find the link which is
-            # then removed by updating the links in the predecessor and successor nodes.
-            dict_delitem(self, key)
-            link_prev, link_next, key = self.__map.pop(key)
-            link_prev[1] = link_next
-            link_next[0] = link_prev
-
-        def __iter__(self):
-            'od.__iter__() <==> iter(od)'
-            root = self.__root
-            curr = root[1]
-            while curr is not root:
-                yield curr[2]
-                curr = curr[1]
-
-        def __reversed__(self):
-            'od.__reversed__() <==> reversed(od)'
-            root = self.__root
-            curr = root[0]
-            while curr is not root:
-                yield curr[2]
-                curr = curr[0]
-
-        def clear(self):
-            'od.clear() -> None.  Remove all items from od.'
-            try:
-                for node in self.__map.itervalues():
-                    del node[:]
-                root = self.__root
-                root[:] = [root, root, None]
-                self.__map.clear()
-            except AttributeError:
-                pass
-            dict.clear(self)
-
-        def popitem(self, last=True):
-            '''od.popitem() -> (k, v), return and remove a (key, value) pair.
-            Pairs are returned in LIFO order if last is true or FIFO order if false.
-
-            '''
-            if not self:
-                raise KeyError('dictionary is empty')
-            root = self.__root
-            if last:
-                link = root[0]
-                link_prev = link[0]
-                link_prev[1] = root
-                root[0] = link_prev
-            else:
-                link = root[1]
-                link_next = link[1]
-                root[1] = link_next
-                link_next[0] = root
-            key = link[2]
-            del self.__map[key]
-            value = dict.pop(self, key)
-            return key, value
-
-        # -- the following methods do not depend on the internal structure --
-
-        def keys(self):
-            'od.keys() -> list of keys in od'
-            return list(self)
-
-        def values(self):
-            'od.values() -> list of values in od'
-            return [self[key] for key in self]
-
-        def items(self):
-            'od.items() -> list of (key, value) pairs in od'
-            return [(key, self[key]) for key in self]
-
-        def iterkeys(self):
-            'od.iterkeys() -> an iterator over the keys in od'
-            return iter(self)
-
-        def itervalues(self):
-            'od.itervalues -> an iterator over the values in od'
-            for k in self:
-                yield self[k]
-
-        def iteritems(self):
-            'od.iteritems -> an iterator over the (key, value) items in od'
-            for k in self:
-                yield (k, self[k])
-
-        def update(*args, **kwds):
-            '''od.update(E, **F) -> None.  Update od from dict/iterable E and F.
-
-            If E is a dict instance, does:           for k in E: od[k] = E[k]
-            If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
-            Or if E is an iterable of items, does:   for k, v in E: od[k] = v
-            In either case, this is followed by:     for k, v in F.items(): od[k] = v
-
-            '''
-            if len(args) > 2:
-                raise TypeError('update() takes at most 2 positional '
-                                'arguments (%d given)' % (len(args),))
-            elif not args:
-                raise TypeError('update() takes at least 1 argument (0 given)')
-            self = args[0]
-            # Make progressively weaker assumptions about "other"
-            other = ()
-            if len(args) == 2:
-                other = args[1]
-            if isinstance(other, dict):
-                for key in other:
-                    self[key] = other[key]
-            elif hasattr(other, 'keys'):
-                for key in other.keys():
-                    self[key] = other[key]
-            else:
-                for key, value in other:
-                    self[key] = value
-            for key, value in kwds.items():
-                self[key] = value
-
-        __update = update  # let subclasses override update without breaking __init__
-
-        __marker = object()
-
-        def pop(self, key, default=__marker):
-            '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
-            If key is not found, d is returned if given, otherwise KeyError is raised.
-
-            '''
-            if key in self:
-                result = self[key]
-                del self[key]
-                return result
-            if default is self.__marker:
-                raise KeyError(key)
-            return default
-
-        def setdefault(self, key, default=None):
-            'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
-            if key in self:
-                return self[key]
-            self[key] = default
-            return default
-
-        def __repr__(self, _repr_running=None):
-            'od.__repr__() <==> repr(od)'
-            if not _repr_running: _repr_running = {}
-            call_key = id(self), _get_ident()
-            if call_key in _repr_running:
-                return '...'
-            _repr_running[call_key] = 1
-            try:
-                if not self:
-                    return '%s()' % (self.__class__.__name__,)
-                return '%s(%r)' % (self.__class__.__name__, self.items())
-            finally:
-                del _repr_running[call_key]
-
-        def __reduce__(self):
-            'Return state information for pickling'
-            items = [[k, self[k]] for k in self]
-            inst_dict = vars(self).copy()
-            for k in vars(OrderedDict()):
-                inst_dict.pop(k, None)
-            if inst_dict:
-                return (self.__class__, (items,), inst_dict)
-            return self.__class__, (items,)
-
-        def copy(self):
-            'od.copy() -> a shallow copy of od'
-            return self.__class__(self)
-
-        @classmethod
-        def fromkeys(cls, iterable, value=None):
-            '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
-            and values equal to v (which defaults to None).
-
-            '''
-            d = cls()
-            for key in iterable:
-                d[key] = value
-            return d
-
-        def __eq__(self, other):
-            '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
-            while comparison to a regular mapping is order-insensitive.
-
-            '''
-            if isinstance(other, OrderedDict):
-                return len(self)==len(other) and self.items() == other.items()
-            return dict.__eq__(self, other)
-
-        def __ne__(self, other):
-            return not self == other
-
-        # -- the following methods are only used in Python 2.7 --
-
-        def viewkeys(self):
-            "od.viewkeys() -> a set-like object providing a view on od's keys"
-            return KeysView(self)
-
-        def viewvalues(self):
-            "od.viewvalues() -> an object providing a view on od's values"
-            return ValuesView(self)
-
-        def viewitems(self):
-            "od.viewitems() -> a set-like object providing a view on od's items"
-            return ItemsView(self)
-
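A quick sketch of the ordering behaviour the backport above provides; collections.OrderedDict on modern Pythons behaves identically (illustrative example only):

    from collections import OrderedDict

    od = OrderedDict()
    od['a'] = 1
    od['b'] = 2
    od['c'] = 3
    assert list(od) == ['a', 'b', 'c']           # insertion order preserved
    assert od.popitem() == ('c', 3)              # LIFO by default
    assert od.popitem(last=False) == ('a', 1)    # FIFO when last=False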
-try:
-    from logging.config import BaseConfigurator, valid_ident
-except ImportError: # pragma: no cover
-    IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)
-
-
-    def valid_ident(s):
-        m = IDENTIFIER.match(s)
-        if not m:
-            raise ValueError('Not a valid Python identifier: %r' % s)
-        return True
-
-
-    # The ConvertingXXX classes are wrappers around standard Python containers,
-    # and they serve to convert any suitable values in the container. The
-    # conversion converts base dicts, lists and tuples to their wrapped
-    # equivalents, whereas strings which match a conversion format are converted
-    # appropriately.
-    #
-    # Each wrapper should have a configurator attribute holding the actual
-    # configurator to use for conversion.
-
-    class ConvertingDict(dict):
-        """A converting dictionary wrapper."""
-
-        def __getitem__(self, key):
-            value = dict.__getitem__(self, key)
-            result = self.configurator.convert(value)
-            #If the converted value is different, save for next time
-            if value is not result:
-                self[key] = result
-                if type(result) in (ConvertingDict, ConvertingList,
-                                    ConvertingTuple):
-                    result.parent = self
-                    result.key = key
-            return result
-
-        def get(self, key, default=None):
-            value = dict.get(self, key, default)
-            result = self.configurator.convert(value)
-            #If the converted value is different, save for next time
-            if value is not result:
-                self[key] = result
-                if type(result) in (ConvertingDict, ConvertingList,
-                                    ConvertingTuple):
-                    result.parent = self
-                    result.key = key
-            return result
-
-        def pop(self, key, default=None):
-            value = dict.pop(self, key, default)
-            result = self.configurator.convert(value)
-            if value is not result:
-                if type(result) in (ConvertingDict, ConvertingList,
-                                    ConvertingTuple):
-                    result.parent = self
-                    result.key = key
-            return result
-
-    class ConvertingList(list):
-        """A converting list wrapper."""
-        def __getitem__(self, key):
-            value = list.__getitem__(self, key)
-            result = self.configurator.convert(value)
-            #If the converted value is different, save for next time
-            if value is not result:
-                self[key] = result
-                if type(result) in (ConvertingDict, ConvertingList,
-                                    ConvertingTuple):
-                    result.parent = self
-                    result.key = key
-            return result
-
-        def pop(self, idx=-1):
-            value = list.pop(self, idx)
-            result = self.configurator.convert(value)
-            if value is not result:
-                if type(result) in (ConvertingDict, ConvertingList,
-                                    ConvertingTuple):
-                    result.parent = self
-            return result
-
-    class ConvertingTuple(tuple):
-        """A converting tuple wrapper."""
-        def __getitem__(self, key):
-            value = tuple.__getitem__(self, key)
-            result = self.configurator.convert(value)
-            if value is not result:
-                if type(result) in (ConvertingDict, ConvertingList,
-                                    ConvertingTuple):
-                    result.parent = self
-                    result.key = key
-            return result
-
-    class BaseConfigurator(object):
-        """
-        The configurator base class which defines some useful defaults.
-        """
-
-        CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')
-
-        WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
-        DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
-        INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
-        DIGIT_PATTERN = re.compile(r'^\d+$')
-
-        value_converters = {
-            'ext' : 'ext_convert',
-            'cfg' : 'cfg_convert',
-        }
-
-        # We might want to use a different one, e.g. importlib
-        importer = staticmethod(__import__)
-
-        def __init__(self, config):
-            self.config = ConvertingDict(config)
-            self.config.configurator = self
-
-        def resolve(self, s):
-            """
-            Resolve strings to objects using standard import and attribute
-            syntax.
-            """
-            name = s.split('.')
-            used = name.pop(0)
-            try:
-                found = self.importer(used)
-                for frag in name:
-                    used += '.' + frag
-                    try:
-                        found = getattr(found, frag)
-                    except AttributeError:
-                        self.importer(used)
-                        found = getattr(found, frag)
-                return found
-            except ImportError:
-                e, tb = sys.exc_info()[1:]
-                v = ValueError('Cannot resolve %r: %s' % (s, e))
-                v.__cause__, v.__traceback__ = e, tb
-                raise v
-
-        def ext_convert(self, value):
-            """Default converter for the ext:// protocol."""
-            return self.resolve(value)
-
-        def cfg_convert(self, value):
-            """Default converter for the cfg:// protocol."""
-            rest = value
-            m = self.WORD_PATTERN.match(rest)
-            if m is None:
-                raise ValueError("Unable to convert %r" % value)
-            else:
-                rest = rest[m.end():]
-                d = self.config[m.groups()[0]]
-                #print d, rest
-                while rest:
-                    m = self.DOT_PATTERN.match(rest)
-                    if m:
-                        d = d[m.groups()[0]]
-                    else:
-                        m = self.INDEX_PATTERN.match(rest)
-                        if m:
-                            idx = m.groups()[0]
-                            if not self.DIGIT_PATTERN.match(idx):
-                                d = d[idx]
-                            else:
-                                try:
-                                    n = int(idx) # try as number first (most likely)
-                                    d = d[n]
-                                except TypeError:
-                                    d = d[idx]
-                    if m:
-                        rest = rest[m.end():]
-                    else:
-                        raise ValueError('Unable to convert '
-                                         '%r at %r' % (value, rest))
-            #rest should be empty
-            return d
-
-        def convert(self, value):
-            """
-            Convert values to an appropriate type. dicts, lists and tuples are
-            replaced by their converting alternatives. Strings are checked to
-            see if they have a conversion format and are converted if they do.
-            """
-            if not isinstance(value, ConvertingDict) and isinstance(value, dict):
-                value = ConvertingDict(value)
-                value.configurator = self
-            elif not isinstance(value, ConvertingList) and isinstance(value, list):
-                value = ConvertingList(value)
-                value.configurator = self
-            elif not isinstance(value, ConvertingTuple) and\
-                     isinstance(value, tuple):
-                value = ConvertingTuple(value)
-                value.configurator = self
-            elif isinstance(value, string_types):
-                m = self.CONVERT_PATTERN.match(value)
-                if m:
-                    d = m.groupdict()
-                    prefix = d['prefix']
-                    converter = self.value_converters.get(prefix, None)
-                    if converter:
-                        suffix = d['suffix']
-                        converter = getattr(self, converter)
-                        value = converter(suffix)
-            return value
-
-        def configure_custom(self, config):
-            """Configure an object with a user-supplied factory."""
-            c = config.pop('()')
-            if not callable(c):
-                c = self.resolve(c)
-            props = config.pop('.', None)
-            # Check for valid identifiers
-            kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
-            result = c(**kwargs)
-            if props:
-                for name, value in props.items():
-                    setattr(result, name, value)
-            return result
-
-        def as_tuple(self, value):
-            """Utility function which converts lists to tuples."""
-            if isinstance(value, list):
-                value = tuple(value)
-            return value
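The ext:// and cfg:// prefixes recognised by CONVERT_PATTERN are resolved through resolve() and cfg_convert(); a minimal sketch using the standard library's logging.config.BaseConfigurator, which this block falls back from and which exposes the same convert() behaviour (illustrative example, assuming that stdlib class is available):

    import sys
    from logging.config import BaseConfigurator

    cfg = BaseConfigurator({'handlers': {'console': {'stream': 'ext://sys.stdout'}}})
    # ext:// resolves through the import machinery (import sys, then getattr stdout)
    assert cfg.convert('ext://sys.stdout') is sys.stdout
    # cfg:// looks the value up inside the configuration dictionary itself
    assert cfg.convert('cfg://handlers.console') == {'stream': 'ext://sys.stdout'}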
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/database.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/database.py
deleted file mode 100644
index b13cdac..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/database.py
+++ /dev/null
@@ -1,1339 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2012-2017 The Python Software Foundation.
-# See LICENSE.txt and CONTRIBUTORS.txt.
-#
-"""PEP 376 implementation."""
-
-from __future__ import unicode_literals
-
-import base64
-import codecs
-import contextlib
-import hashlib
-import logging
-import os
-import posixpath
-import sys
-import zipimport
-
-from . import DistlibException, resources
-from .compat import StringIO
-from .version import get_scheme, UnsupportedVersionError
-from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
-                       LEGACY_METADATA_FILENAME)
-from .util import (parse_requirement, cached_property, parse_name_and_version,
-                   read_exports, write_exports, CSVReader, CSVWriter)
-
-
-__all__ = ['Distribution', 'BaseInstalledDistribution',
-           'InstalledDistribution', 'EggInfoDistribution',
-           'DistributionPath']
-
-
-logger = logging.getLogger(__name__)
-
-EXPORTS_FILENAME = 'pydist-exports.json'
-COMMANDS_FILENAME = 'pydist-commands.json'
-
-DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED',
-              'RESOURCES', EXPORTS_FILENAME, 'SHARED')
-
-DISTINFO_EXT = '.dist-info'
-
-
-class _Cache(object):
-    """
-    A simple cache mapping names and .dist-info paths to distributions
-    """
-    def __init__(self):
-        """
-        Initialise an instance. There is normally one for each DistributionPath.
-        """
-        self.name = {}
-        self.path = {}
-        self.generated = False
-
-    def clear(self):
-        """
-        Clear the cache, setting it to its initial state.
-        """
-        self.name.clear()
-        self.path.clear()
-        self.generated = False
-
-    def add(self, dist):
-        """
-        Add a distribution to the cache.
-        :param dist: The distribution to add.
-        """
-        if dist.path not in self.path:
-            self.path[dist.path] = dist
-            self.name.setdefault(dist.key, []).append(dist)
-
-
-class DistributionPath(object):
-    """
-    Represents a set of distributions installed on a path (typically sys.path).
-    """
-    def __init__(self, path=None, include_egg=False):
-        """
-        Create an instance from a path, optionally including legacy (distutils/
-        setuptools/distribute) distributions.
-        :param path: The path to use, as a list of directories. If not specified,
-                     sys.path is used.
-        :param include_egg: If True, this instance will look for and return legacy
-                            distributions as well as those based on PEP 376.
-        """
-        if path is None:
-            path = sys.path
-        self.path = path
-        self._include_dist = True
-        self._include_egg = include_egg
-
-        self._cache = _Cache()
-        self._cache_egg = _Cache()
-        self._cache_enabled = True
-        self._scheme = get_scheme('default')
-
-    def _get_cache_enabled(self):
-        return self._cache_enabled
-
-    def _set_cache_enabled(self, value):
-        self._cache_enabled = value
-
-    cache_enabled = property(_get_cache_enabled, _set_cache_enabled)
-
-    def clear_cache(self):
-        """
-        Clears the internal cache.
-        """
-        self._cache.clear()
-        self._cache_egg.clear()
-
-
-    def _yield_distributions(self):
-        """
-        Yield .dist-info and/or .egg(-info) distributions.
-        """
-        # We need to check if we've seen some resources already, because on
-        # some Linux systems (e.g. some Debian/Ubuntu variants) there are
-        # symlinks which alias other files in the environment.
-        seen = set()
-        for path in self.path:
-            finder = resources.finder_for_path(path)
-            if finder is None:
-                continue
-            r = finder.find('')
-            if not r or not r.is_container:
-                continue
-            rset = sorted(r.resources)
-            for entry in rset:
-                r = finder.find(entry)
-                if not r or r.path in seen:
-                    continue
-                if self._include_dist and entry.endswith(DISTINFO_EXT):
-                    possible_filenames = [METADATA_FILENAME,
-                                          WHEEL_METADATA_FILENAME,
-                                          LEGACY_METADATA_FILENAME]
-                    for metadata_filename in possible_filenames:
-                        metadata_path = posixpath.join(entry, metadata_filename)
-                        pydist = finder.find(metadata_path)
-                        if pydist:
-                            break
-                    else:
-                        continue
-
-                    with contextlib.closing(pydist.as_stream()) as stream:
-                        metadata = Metadata(fileobj=stream, scheme='legacy')
-                    logger.debug('Found %s', r.path)
-                    seen.add(r.path)
-                    yield new_dist_class(r.path, metadata=metadata,
-                                         env=self)
-                elif self._include_egg and entry.endswith(('.egg-info',
-                                                          '.egg')):
-                    logger.debug('Found %s', r.path)
-                    seen.add(r.path)
-                    yield old_dist_class(r.path, self)
-
-    def _generate_cache(self):
-        """
-        Scan the path for distributions and populate the cache with
-        those that are found.
-        """
-        gen_dist = not self._cache.generated
-        gen_egg = self._include_egg and not self._cache_egg.generated
-        if gen_dist or gen_egg:
-            for dist in self._yield_distributions():
-                if isinstance(dist, InstalledDistribution):
-                    self._cache.add(dist)
-                else:
-                    self._cache_egg.add(dist)
-
-            if gen_dist:
-                self._cache.generated = True
-            if gen_egg:
-                self._cache_egg.generated = True
-
-    @classmethod
-    def distinfo_dirname(cls, name, version):
-        """
-        The *name* and *version* parameters are converted into their
-        filename-escaped form, i.e. any ``'-'`` characters are replaced
-        with ``'_'`` other than the one in ``'dist-info'`` and the one
-        separating the name from the version number.
-
-        :parameter name: is converted to a standard distribution name by replacing
-                         any runs of non-alphanumeric characters with a single
-                         ``'-'``.
-        :type name: string
-        :parameter version: is converted to a standard version string. Spaces
-                            become dots, and all other non-alphanumeric characters
-                            (except dots) become dashes, with runs of multiple
-                            dashes condensed to a single dash.
-        :type version: string
-        :returns: directory name
-        :rtype: string"""
-        name = name.replace('-', '_')
-        return '-'.join([name, version]) + DISTINFO_EXT
-
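For example, distinfo_dirname() performs just the two steps shown in its body; a standalone re-implementation for illustration (the constant name mirrors DISTINFO_EXT above, the inputs are made-up examples):

    DISTINFO_EXT = '.dist-info'

    def distinfo_dirname(name, version):
        # dashes in the name become underscores, then 'name-version' + extension
        return '-'.join([name.replace('-', '_'), version]) + DISTINFO_EXT

    assert distinfo_dirname('flit-core', '3.9.0') == 'flit_core-3.9.0.dist-info'
    assert distinfo_dirname('zope.interface', '4.6.0') == 'zope.interface-4.6.0.dist-info'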
-    def get_distributions(self):
-        """
-        Provides an iterator that looks for distributions and returns
-        :class:`InstalledDistribution` or
-        :class:`EggInfoDistribution` instances for each one of them.
-
-        :rtype: iterator of :class:`InstalledDistribution` and
-                :class:`EggInfoDistribution` instances
-        """
-        if not self._cache_enabled:
-            for dist in self._yield_distributions():
-                yield dist
-        else:
-            self._generate_cache()
-
-            for dist in self._cache.path.values():
-                yield dist
-
-            if self._include_egg:
-                for dist in self._cache_egg.path.values():
-                    yield dist
-
-    def get_distribution(self, name):
-        """
-        Looks for a named distribution on the path.
-
-        This function only returns the first result found, as no more than one
-        value is expected. If nothing is found, ``None`` is returned.
-
-        :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution`
-                or ``None``
-        """
-        result = None
-        name = name.lower()
-        if not self._cache_enabled:
-            for dist in self._yield_distributions():
-                if dist.key == name:
-                    result = dist
-                    break
-        else:
-            self._generate_cache()
-
-            if name in self._cache.name:
-                result = self._cache.name[name][0]
-            elif self._include_egg and name in self._cache_egg.name:
-                result = self._cache_egg.name[name][0]
-        return result
-
-    def provides_distribution(self, name, version=None):
-        """
-        Iterates over all distributions to find which distributions provide *name*.
-        If a *version* is provided, it will be used to filter the results.
-
-        This function only returns the first result found, since no more than
-        one value is expected. If the directory is not found, returns ``None``.
-
-        :parameter version: a version specifier that indicates the version
-                            required, conforming to the format in ``PEP-345``
-
-        :type name: string
-        :type version: string
-        """
-        matcher = None
-        if version is not None:
-            try:
-                matcher = self._scheme.matcher('%s (%s)' % (name, version))
-            except ValueError:
-                raise DistlibException('invalid name or version: %r, %r' %
-                                      (name, version))
-
-        for dist in self.get_distributions():
-            # We hit a problem on Travis where enum34 was installed and doesn't
-            # have a provides attribute ...
-            if not hasattr(dist, 'provides'):
-                logger.debug('No "provides": %s', dist)
-            else:
-                provided = dist.provides
-
-                for p in provided:
-                    p_name, p_ver = parse_name_and_version(p)
-                    if matcher is None:
-                        if p_name == name:
-                            yield dist
-                            break
-                    else:
-                        if p_name == name and matcher.match(p_ver):
-                            yield dist
-                            break
-
-    def get_file_path(self, name, relative_path):
-        """
-        Return the path to a resource file.
-        """
-        dist = self.get_distribution(name)
-        if dist is None:
-            raise LookupError('no distribution named %r found' % name)
-        return dist.get_resource_path(relative_path)
-
-    def get_exported_entries(self, category, name=None):
-        """
-        Return all of the exported entries in a particular category.
-
-        :param category: The category to search for entries.
-        :param name: If specified, only entries with that name are returned.
-        """
-        for dist in self.get_distributions():
-            r = dist.exports
-            if category in r:
-                d = r[category]
-                if name is not None:
-                    if name in d:
-                        yield d[name]
-                else:
-                    for v in d.values():
-                        yield v
-
-
-class Distribution(object):
-    """
-    A base class for distributions, whether installed or from indexes.
-    Either way, it must have some metadata, so that's all that's needed
-    for construction.
-    """
-
-    build_time_dependency = False
-    """
-    Set to True if it's known to be only a build-time dependency (i.e.
-    not needed after installation).
-    """
-
-    requested = False
-    """A boolean that indicates whether the ``REQUESTED`` metadata file is
-    present (in other words, whether the package was installed by user
-    request or it was installed as a dependency)."""
-
-    def __init__(self, metadata):
-        """
-        Initialise an instance.
-        :param metadata: The instance of :class:`Metadata` describing this
-        distribution.
-        """
-        self.metadata = metadata
-        self.name = metadata.name
-        self.key = self.name.lower()    # for case-insensitive comparisons
-        self.version = metadata.version
-        self.locator = None
-        self.digest = None
-        self.extras = None      # additional features requested
-        self.context = None     # environment marker overrides
-        self.download_urls = set()
-        self.digests = {}
-
-    @property
-    def source_url(self):
-        """
-        The source archive download URL for this distribution.
-        """
-        return self.metadata.source_url
-
-    download_url = source_url   # Backward compatibility
-
-    @property
-    def name_and_version(self):
-        """
-        A utility property which displays the name and version in parentheses.
-        """
-        return '%s (%s)' % (self.name, self.version)
-
-    @property
-    def provides(self):
-        """
-        A set of distribution names and versions provided by this distribution.
-        :return: A set of "name (version)" strings.
-        """
-        plist = self.metadata.provides
-        s = '%s (%s)' % (self.name, self.version)
-        if s not in plist:
-            plist.append(s)
-        return plist
-
-    def _get_requirements(self, req_attr):
-        md = self.metadata
-        logger.debug('Getting requirements from metadata %r', md.todict())
-        reqts = getattr(md, req_attr)
-        return set(md.get_requirements(reqts, extras=self.extras,
-                                       env=self.context))
-
-    @property
-    def run_requires(self):
-        return self._get_requirements('run_requires')
-
-    @property
-    def meta_requires(self):
-        return self._get_requirements('meta_requires')
-
-    @property
-    def build_requires(self):
-        return self._get_requirements('build_requires')
-
-    @property
-    def test_requires(self):
-        return self._get_requirements('test_requires')
-
-    @property
-    def dev_requires(self):
-        return self._get_requirements('dev_requires')
-
-    def matches_requirement(self, req):
-        """
-        Say if this instance matches (fulfills) a requirement.
-        :param req: The requirement to match.
-        :rtype req: str
-        :return: True if it matches, else False.
-        """
-        # Requirement may contain extras - parse to lose those
-        # from what's passed to the matcher
-        r = parse_requirement(req)
-        scheme = get_scheme(self.metadata.scheme)
-        try:
-            matcher = scheme.matcher(r.requirement)
-        except UnsupportedVersionError:
-            # XXX compat-mode if cannot read the version
-            logger.warning('could not read version %r - using name only',
-                           req)
-            name = req.split()[0]
-            matcher = scheme.matcher(name)
-
-        name = matcher.key   # case-insensitive
-
-        result = False
-        for p in self.provides:
-            p_name, p_ver = parse_name_and_version(p)
-            if p_name != name:
-                continue
-            try:
-                result = matcher.match(p_ver)
-                break
-            except UnsupportedVersionError:
-                pass
-        return result
-
-    def __repr__(self):
-        """
-        Return a textual representation of this instance.
-        """
-        if self.source_url:
-            suffix = ' [%s]' % self.source_url
-        else:
-            suffix = ''
-        return '<Distribution %s (%s)%s>' % (self.name, self.version, suffix)
-
-    def __eq__(self, other):
-        """
-        See if this distribution is the same as another.
-        :param other: The distribution to compare with. To be equal to one
-                      another, distributions must have the same type, name,
-                      version and source_url.
-        :return: True if it is the same, else False.
-        """
-        if type(other) is not type(self):
-            result = False
-        else:
-            result = (self.name == other.name and
-                      self.version == other.version and
-                      self.source_url == other.source_url)
-        return result
-
-    def __hash__(self):
-        """
-        Compute hash in a way which matches the equality test.
-        """
-        return hash(self.name) + hash(self.version) + hash(self.source_url)
-
-
-class BaseInstalledDistribution(Distribution):
-    """
-    This is the base class for installed distributions (whether PEP 376 or
-    legacy).
-    """
-
-    hasher = None
-
-    def __init__(self, metadata, path, env=None):
-        """
-        Initialise an instance.
-        :param metadata: An instance of :class:`Metadata` which describes the
-                         distribution. This will normally have been initialised
-                         from a metadata file in the ``path``.
-        :param path:     The path of the ``.dist-info`` or ``.egg-info``
-                         directory for the distribution.
-        :param env:      This is normally the :class:`DistributionPath`
-                         instance where this distribution was found.
-        """
-        super(BaseInstalledDistribution, self).__init__(metadata)
-        self.path = path
-        self.dist_path = env
-
-    def get_hash(self, data, hasher=None):
-        """
-        Get the hash of some data, using a particular hash algorithm, if
-        specified.
-
-        :param data: The data to be hashed.
-        :type data: bytes
-        :param hasher: The name of a hash implementation, supported by hashlib,
-                       or ``None``. Examples of valid values are ``'sha1'``,
-                       ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and
-                       ``'sha512'``. If no hasher is specified, the ``hasher``
-                       attribute of the :class:`InstalledDistribution` instance
-                       is used. If the hasher is determined to be ``None``, MD5
-                       is used as the hashing algorithm.
-        :returns: The hash of the data. If a hasher was explicitly specified,
-                  the returned hash will be prefixed with the specified hasher
-                  followed by '='.
-        :rtype: str
-        """
-        if hasher is None:
-            hasher = self.hasher
-        if hasher is None:
-            hasher = hashlib.md5
-            prefix = ''
-        else:
-            hasher = getattr(hashlib, hasher)
-            prefix = '%s=' % self.hasher
-        digest = hasher(data).digest()
-        digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')
-        return '%s%s' % (prefix, digest)
-
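The string returned by get_hash() is the RECORD-style form: a urlsafe base64 digest with '=' padding stripped, optionally prefixed with the hasher name and '='. A minimal sketch of the same computation (illustrative example only):

    import base64
    import hashlib

    data = b'print("hello")\n'
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b'=')
    record_hash = 'sha256=%s' % digest.decode('ascii')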
-
-class InstalledDistribution(BaseInstalledDistribution):
-    """
-    Created with the *path* of the ``.dist-info`` directory provided to the
-    constructor. It reads the metadata contained in ``pydist.json`` when it is
-    instantiated, or uses a passed-in Metadata instance (useful for when
-    dry-run mode is being used).
-    """
-
-    hasher = 'sha256'
-
-    def __init__(self, path, metadata=None, env=None):
-        self.modules = []
-        self.finder = finder = resources.finder_for_path(path)
-        if finder is None:
-            raise ValueError('finder unavailable for %s' % path)
-        if env and env._cache_enabled and path in env._cache.path:
-            metadata = env._cache.path[path].metadata
-        elif metadata is None:
-            r = finder.find(METADATA_FILENAME)
-            # Temporary - for Wheel 0.23 support
-            if r is None:
-                r = finder.find(WHEEL_METADATA_FILENAME)
-            # Temporary - for legacy support
-            if r is None:
-                r = finder.find('METADATA')
-            if r is None:
-                raise ValueError('no %s found in %s' % (METADATA_FILENAME,
-                                                        path))
-            with contextlib.closing(r.as_stream()) as stream:
-                metadata = Metadata(fileobj=stream, scheme='legacy')
-
-        super(InstalledDistribution, self).__init__(metadata, path, env)
-
-        if env and env._cache_enabled:
-            env._cache.add(self)
-
-        r = finder.find('REQUESTED')
-        self.requested = r is not None
-        p  = os.path.join(path, 'top_level.txt')
-        if os.path.exists(p):
-            with open(p, 'rb') as f:
-                data = f.read()
-            self.modules = data.splitlines()
-
-    def __repr__(self):
-        return '<InstalledDistribution %r %s at %r>' % (
-            self.name, self.version, self.path)
-
-    def __str__(self):
-        return "%s %s" % (self.name, self.version)
-
-    def _get_records(self):
-        """
-        Get the list of installed files for the distribution
-        :return: A list of tuples of path, hash and size. Note that hash and
-                 size might be ``None`` for some entries. The path is exactly
-                 as stored in the file (which is as in PEP 376).
-        """
-        results = []
-        r = self.get_distinfo_resource('RECORD')
-        with contextlib.closing(r.as_stream()) as stream:
-            with CSVReader(stream=stream) as record_reader:
-                # Base location is parent dir of .dist-info dir
-                #base_location = os.path.dirname(self.path)
-                #base_location = os.path.abspath(base_location)
-                for row in record_reader:
-                    missing = [None for i in range(len(row), 3)]
-                    path, checksum, size = row + missing
-                    #if not os.path.isabs(path):
-                    #    path = path.replace('/', os.sep)
-                    #    path = os.path.join(base_location, path)
-                    results.append((path, checksum, size))
-        return results
-
-    @cached_property
-    def exports(self):
-        """
-        Return the information exported by this distribution.
-        :return: A dictionary of exports, mapping an export category to a dict
-                 of :class:`ExportEntry` instances describing the individual
-                 export entries, and keyed by name.
-        """
-        result = {}
-        r = self.get_distinfo_resource(EXPORTS_FILENAME)
-        if r:
-            result = self.read_exports()
-        return result
-
-    def read_exports(self):
-        """
-        Read exports data from a file in .ini format.
-
-        :return: A dictionary of exports, mapping an export category to a list
-                 of :class:`ExportEntry` instances describing the individual
-                 export entries.
-        """
-        result = {}
-        r = self.get_distinfo_resource(EXPORTS_FILENAME)
-        if r:
-            with contextlib.closing(r.as_stream()) as stream:
-                result = read_exports(stream)
-        return result
-
-    def write_exports(self, exports):
-        """
-        Write a dictionary of exports to a file in .ini format.
-        :param exports: A dictionary of exports, mapping an export category to
-                        a list of :class:`ExportEntry` instances describing the
-                        individual export entries.
-        """
-        rf = self.get_distinfo_file(EXPORTS_FILENAME)
-        with open(rf, 'w') as f:
-            write_exports(exports, f)
-
-    def get_resource_path(self, relative_path):
-        """
-        NOTE: This API may change in the future.
-
-        Return the absolute path to a resource file with the given relative
-        path.
-
-        :param relative_path: The path, relative to .dist-info, of the resource
-                              of interest.
-        :return: The absolute path where the resource is to be found.
-        """
-        r = self.get_distinfo_resource('RESOURCES')
-        with contextlib.closing(r.as_stream()) as stream:
-            with CSVReader(stream=stream) as resources_reader:
-                for relative, destination in resources_reader:
-                    if relative == relative_path:
-                        return destination
-        raise KeyError('no resource file with relative path %r '
-                       'is installed' % relative_path)
-
-    def list_installed_files(self):
-        """
-        Iterates over the ``RECORD`` entries and returns a tuple
-        ``(path, hash, size)`` for each line.
-
-        :returns: iterator of (path, hash, size)
-        """
-        for result in self._get_records():
-            yield result
-
-    def write_installed_files(self, paths, prefix, dry_run=False):
-        """
-        Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any
-        existing ``RECORD`` file is silently overwritten.
-
-        prefix is used to determine when to write absolute paths.
-        """
-        prefix = os.path.join(prefix, '')
-        base = os.path.dirname(self.path)
-        base_under_prefix = base.startswith(prefix)
-        base = os.path.join(base, '')
-        record_path = self.get_distinfo_file('RECORD')
-        logger.info('creating %s', record_path)
-        if dry_run:
-            return None
-        with CSVWriter(record_path) as writer:
-            for path in paths:
-                if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')):
-                    # do not put size and hash, as in PEP-376
-                    hash_value = size = ''
-                else:
-                    size = '%d' % os.path.getsize(path)
-                    with open(path, 'rb') as fp:
-                        hash_value = self.get_hash(fp.read())
-                if path.startswith(base) or (base_under_prefix and
-                                             path.startswith(prefix)):
-                    path = os.path.relpath(path, base)
-                writer.writerow((path, hash_value, size))
-
-            # add the RECORD file itself
-            if record_path.startswith(base):
-                record_path = os.path.relpath(record_path, base)
-            writer.writerow((record_path, '', ''))
-        return record_path
-
-    def check_installed_files(self):
-        """
-        Checks that the hashes and sizes of the files in ``RECORD`` are
-        matched by the files themselves. Returns a (possibly empty) list of
-        mismatches. Each entry in the mismatch list will be a tuple consisting
-        of the path, 'exists', 'size' or 'hash' according to what didn't match
-        (existence is checked first, then size, then hash), the expected
-        value and the actual value.
-        """
-        mismatches = []
-        base = os.path.dirname(self.path)
-        record_path = self.get_distinfo_file('RECORD')
-        for path, hash_value, size in self.list_installed_files():
-            if not os.path.isabs(path):
-                path = os.path.join(base, path)
-            if path == record_path:
-                continue
-            if not os.path.exists(path):
-                mismatches.append((path, 'exists', True, False))
-            elif os.path.isfile(path):
-                actual_size = str(os.path.getsize(path))
-                if size and actual_size != size:
-                    mismatches.append((path, 'size', size, actual_size))
-                elif hash_value:
-                    if '=' in hash_value:
-                        hasher = hash_value.split('=', 1)[0]
-                    else:
-                        hasher = None
-
-                    with open(path, 'rb') as f:
-                        actual_hash = self.get_hash(f.read(), hasher)
-                        if actual_hash != hash_value:
-                            mismatches.append((path, 'hash', hash_value, actual_hash))
-        return mismatches
-
-    @cached_property
-    def shared_locations(self):
-        """
-        A dictionary of shared locations whose keys are in the set 'prefix',
-        'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'.
-        The corresponding value is the absolute path of that category for
-        this distribution, and takes into account any paths selected by the
-        user at installation time (e.g. via command-line arguments). In the
-        case of the 'namespace' key, this would be a list of absolute paths
-        for the roots of namespace packages in this distribution.
-
-        The first time this property is accessed, the relevant information is
-        read from the SHARED file in the .dist-info directory.
-        """
-        result = {}
-        shared_path = os.path.join(self.path, 'SHARED')
-        if os.path.isfile(shared_path):
-            with codecs.open(shared_path, 'r', encoding='utf-8') as f:
-                lines = f.read().splitlines()
-            for line in lines:
-                key, value = line.split('=', 1)
-                if key == 'namespace':
-                    result.setdefault(key, []).append(value)
-                else:
-                    result[key] = value
-        return result
-
-    def write_shared_locations(self, paths, dry_run=False):
-        """
-        Write shared location information to the SHARED file in .dist-info.
-        :param paths: A dictionary as described in the documentation for
-        :meth:`shared_locations`.
-        :param dry_run: If True, the action is logged but no file is actually
-                        written.
-        :return: The path of the file written to.
-        """
-        shared_path = os.path.join(self.path, 'SHARED')
-        logger.info('creating %s', shared_path)
-        if dry_run:
-            return None
-        lines = []
-        for key in ('prefix', 'lib', 'headers', 'scripts', 'data'):
-            path = paths[key]
-            if os.path.isdir(paths[key]):
-                lines.append('%s=%s' % (key,  path))
-        for ns in paths.get('namespace', ()):
-            lines.append('namespace=%s' % ns)
-
-        with codecs.open(shared_path, 'w', encoding='utf-8') as f:
-            f.write('\n'.join(lines))
-        return shared_path
-
-    def get_distinfo_resource(self, path):
-        if path not in DIST_FILES:
-            raise DistlibException('invalid path for a dist-info file: '
-                                   '%r at %r' % (path, self.path))
-        finder = resources.finder_for_path(self.path)
-        if finder is None:
-            raise DistlibException('Unable to get a finder for %s' % self.path)
-        return finder.find(path)
-
-    def get_distinfo_file(self, path):
-        """
-        Returns a path located under the ``.dist-info`` directory. Returns a
-        string representing the path.
-
-        :parameter path: a ``'/'``-separated path relative to the
-                         ``.dist-info`` directory or an absolute path;
-                         If *path* is an absolute path and doesn't start
-                         with the ``.dist-info`` directory path,
-                         a :class:`DistlibException` is raised
-        :type path: str
-        :rtype: str
-        """
-        # Check if it is an absolute path  # XXX use relpath, add tests
-        if path.find(os.sep) >= 0:
-            # it's an absolute path?
-            distinfo_dirname, path = path.split(os.sep)[-2:]
-            if distinfo_dirname != self.path.split(os.sep)[-1]:
-                raise DistlibException(
-                    'dist-info file %r does not belong to the %r %s '
-                    'distribution' % (path, self.name, self.version))
-
-        # The file must be relative
-        if path not in DIST_FILES:
-            raise DistlibException('invalid path for a dist-info file: '
-                                   '%r at %r' % (path, self.path))
-
-        return os.path.join(self.path, path)
-
-    def list_distinfo_files(self):
-        """
-        Iterates over the ``RECORD`` entries and returns paths for each line if
-        the path is pointing to a file located in the ``.dist-info`` directory
-        or one of its subdirectories.
-
-        :returns: iterator of paths
-        """
-        base = os.path.dirname(self.path)
-        for path, checksum, size in self._get_records():
-            # XXX add separator or use real relpath algo
-            if not os.path.isabs(path):
-                path = os.path.join(base, path)
-            if path.startswith(self.path):
-                yield path
-
-    def __eq__(self, other):
-        return (isinstance(other, InstalledDistribution) and
-                self.path == other.path)
-
-    # See http://docs.python.org/reference/datamodel#object.__hash__
-    __hash__ = object.__hash__
-
-
-class EggInfoDistribution(BaseInstalledDistribution):
-    """Created with the *path* of the ``.egg-info`` directory or file provided
-    to the constructor. It reads the metadata contained in the file itself, or
-    if the given path happens to be a directory, the metadata is read from the
-    file ``PKG-INFO`` under that directory."""
-
-    requested = True    # as we have no way of knowing, assume it was
-    shared_locations = {}
-
-    def __init__(self, path, env=None):
-        def set_name_and_version(s, n, v):
-            s.name = n
-            s.key = n.lower()   # for case-insensitive comparisons
-            s.version = v
-
-        self.path = path
-        self.dist_path = env
-        if env and env._cache_enabled and path in env._cache_egg.path:
-            metadata = env._cache_egg.path[path].metadata
-            set_name_and_version(self, metadata.name, metadata.version)
-        else:
-            metadata = self._get_metadata(path)
-
-            # Need to be set before caching
-            set_name_and_version(self, metadata.name, metadata.version)
-
-            if env and env._cache_enabled:
-                env._cache_egg.add(self)
-        super(EggInfoDistribution, self).__init__(metadata, path, env)
-
-    def _get_metadata(self, path):
-        requires = None
-
-        def parse_requires_data(data):
-            """Create a list of dependencies from a requires.txt file.
-
-            *data*: the contents of a setuptools-produced requires.txt file.
-            """
-            reqs = []
-            lines = data.splitlines()
-            for line in lines:
-                line = line.strip()
-                if line.startswith('['):
-                    logger.warning('Unexpected line: quitting requirement scan: %r',
-                                   line)
-                    break
-                r = parse_requirement(line)
-                if not r:
-                    logger.warning('Not recognised as a requirement: %r', line)
-                    continue
-                if r.extras:
-                    logger.warning('extra requirements in requires.txt are '
-                                   'not supported')
-                if not r.constraints:
-                    reqs.append(r.name)
-                else:
-                    cons = ', '.join('%s%s' % c for c in r.constraints)
-                    reqs.append('%s (%s)' % (r.name, cons))
-            return reqs
-
-        def parse_requires_path(req_path):
-            """Create a list of dependencies from a requires.txt file.
-
-            *req_path*: the path to a setuptools-produced requires.txt file.
-            """
-
-            reqs = []
-            try:
-                with codecs.open(req_path, 'r', 'utf-8') as fp:
-                    reqs = parse_requires_data(fp.read())
-            except IOError:
-                pass
-            return reqs
-
-        tl_path = tl_data = None
-        if path.endswith('.egg'):
-            if os.path.isdir(path):
-                p = os.path.join(path, 'EGG-INFO')
-                meta_path = os.path.join(p, 'PKG-INFO')
-                metadata = Metadata(path=meta_path, scheme='legacy')
-                req_path = os.path.join(p, 'requires.txt')
-                tl_path = os.path.join(p, 'top_level.txt')
-                requires = parse_requires_path(req_path)
-            else:
-                # FIXME handle the case where zipfile is not available
-                zipf = zipimport.zipimporter(path)
-                fileobj = StringIO(
-                    zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8'))
-                metadata = Metadata(fileobj=fileobj, scheme='legacy')
-                try:
-                    data = zipf.get_data('EGG-INFO/requires.txt')
-                    tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode('utf-8')
-                    requires = parse_requires_data(data.decode('utf-8'))
-                except IOError:
-                    requires = None
-        elif path.endswith('.egg-info'):
-            if os.path.isdir(path):
-                req_path = os.path.join(path, 'requires.txt')
-                requires = parse_requires_path(req_path)
-                # compute tl_path before path is repointed at PKG-INFO below
-                tl_path = os.path.join(path, 'top_level.txt')
-                path = os.path.join(path, 'PKG-INFO')
-            metadata = Metadata(path=path, scheme='legacy')
-        else:
-            raise DistlibException('path must end with .egg-info or .egg, '
-                                   'got %r' % path)
-
-        if requires:
-            metadata.add_requirements(requires)
-        # look for top-level modules in top_level.txt, if present
-        if tl_data is None:
-            if tl_path is not None and os.path.exists(tl_path):
-                with open(tl_path, 'rb') as f:
-                    tl_data = f.read().decode('utf-8')
-        if not tl_data:
-            tl_data = []
-        else:
-            tl_data = tl_data.splitlines()
-        self.modules = tl_data
-        return metadata
-
-    def __repr__(self):
-        return '<EggInfoDistribution %r %s at %r>' % (
-            self.name, self.version, self.path)
-
-    def __str__(self):
-        return "%s %s" % (self.name, self.version)
-
-    def check_installed_files(self):
-        """
-        Checks that the files listed in ``installed-files.txt`` actually
-        exist. Returns a (possibly empty) list of mismatches. Each entry in
-        the mismatch list is a tuple consisting of the path, 'exists', the
-        expected value and the actual value.
-        """
-        mismatches = []
-        record_path = os.path.join(self.path, 'installed-files.txt')
-        if os.path.exists(record_path):
-            for path, _, _ in self.list_installed_files():
-                if path == record_path:
-                    continue
-                if not os.path.exists(path):
-                    mismatches.append((path, 'exists', True, False))
-        return mismatches
-
-    def list_installed_files(self):
-        """
-        Iterates over the ``installed-files.txt`` entries and returns a tuple
-        ``(path, hash, size)`` for each line.
-
-        :returns: a list of (path, hash, size)
-        """
-
-        def _md5(path):
-            with open(path, 'rb') as f:
-                content = f.read()
-            return hashlib.md5(content).hexdigest()
-
-        def _size(path):
-            return os.stat(path).st_size
-
-        record_path = os.path.join(self.path, 'installed-files.txt')
-        result = []
-        if os.path.exists(record_path):
-            with codecs.open(record_path, 'r', encoding='utf-8') as f:
-                for line in f:
-                    line = line.strip()
-                    p = os.path.normpath(os.path.join(self.path, line))
-                    # "./" is present as a marker between installed files
-                    # and installation metadata files
-                    if not os.path.exists(p):
-                        logger.warning('Non-existent file: %s', p)
-                        if p.endswith(('.pyc', '.pyo')):
-                            continue
-                        # otherwise fall through and fail
-                    if not os.path.isdir(p):
-                        result.append((p, _md5(p), _size(p)))
-            result.append((record_path, None, None))
-        return result
-
-    def list_distinfo_files(self, absolute=False):
-        """
-        Iterates over the ``installed-files.txt`` entries and returns paths for
-        each line if the path is pointing to a file located in the
-        ``.egg-info`` directory or one of its subdirectories.
-
-        :parameter absolute: If *absolute* is ``True``, each returned path is
-                          transformed into a local absolute path. Otherwise the
-                          raw value from ``installed-files.txt`` is returned.
-        :type absolute: boolean
-        :returns: iterator of paths
-        """
-        record_path = os.path.join(self.path, 'installed-files.txt')
-        if os.path.exists(record_path):
-            skip = True
-            with codecs.open(record_path, 'r', encoding='utf-8') as f:
-                for line in f:
-                    line = line.strip()
-                    if line == './':
-                        skip = False
-                        continue
-                    if not skip:
-                        p = os.path.normpath(os.path.join(self.path, line))
-                        if p.startswith(self.path):
-                            if absolute:
-                                yield p
-                            else:
-                                yield line
-
-    def __eq__(self, other):
-        return (isinstance(other, EggInfoDistribution) and
-                self.path == other.path)
-
-    # See http://docs.python.org/reference/datamodel#object.__hash__
-    __hash__ = object.__hash__
-
-new_dist_class = InstalledDistribution
-old_dist_class = EggInfoDistribution
-
-
-class DependencyGraph(object):
-    """
-    Represents a dependency graph between distributions.
-
-    The dependency relationships are stored in an ``adjacency_list`` that maps
-    distributions to a list of ``(other, label)`` tuples where ``other``
-    is a distribution and the edge is labeled with ``label`` (i.e. the version
-    specifier, if such was provided). Also, for more efficient traversal, for
-    every distribution ``x``, a list of predecessors is kept in
-    ``reverse_list[x]``. An edge from distribution ``a`` to
-    distribution ``b`` means that ``a`` depends on ``b``. If any missing
-    dependencies are found, they are stored in ``missing``, which is a
-    dictionary that maps distributions to a list of requirements that were not
-    provided by any other distributions.
-    """
-
-    def __init__(self):
-        self.adjacency_list = {}
-        self.reverse_list = {}
-        self.missing = {}
-
-    def add_distribution(self, distribution):
-        """Add the *distribution* to the graph.
-
-        :type distribution: :class:`distutils2.database.InstalledDistribution`
-                            or :class:`distutils2.database.EggInfoDistribution`
-        """
-        self.adjacency_list[distribution] = []
-        self.reverse_list[distribution] = []
-        #self.missing[distribution] = []
-
-    def add_edge(self, x, y, label=None):
-        """Add an edge from distribution *x* to distribution *y* with the given
-        *label*.
-
-        :type x: :class:`distutils2.database.InstalledDistribution` or
-                 :class:`distutils2.database.EggInfoDistribution`
-        :type y: :class:`distutils2.database.InstalledDistribution` or
-                 :class:`distutils2.database.EggInfoDistribution`
-        :type label: ``str`` or ``None``
-        """
-        self.adjacency_list[x].append((y, label))
-        # multiple edges are allowed, so be careful
-        if x not in self.reverse_list[y]:
-            self.reverse_list[y].append(x)
-
-    def add_missing(self, distribution, requirement):
-        """
-        Add a missing *requirement* for the given *distribution*.
-
-        :type distribution: :class:`distutils2.database.InstalledDistribution`
-                            or :class:`distutils2.database.EggInfoDistribution`
-        :type requirement: ``str``
-        """
-        logger.debug('%s missing %r', distribution, requirement)
-        self.missing.setdefault(distribution, []).append(requirement)
-
-    def _repr_dist(self, dist):
-        return '%s %s' % (dist.name, dist.version)
-
-    def repr_node(self, dist, level=1):
-        """Prints only a subgraph"""
-        output = [self._repr_dist(dist)]
-        for other, label in self.adjacency_list[dist]:
-            dist = self._repr_dist(other)
-            if label is not None:
-                dist = '%s [%s]' % (dist, label)
-            output.append('    ' * level + str(dist))
-            suboutput = self.repr_node(other, level + 1)
-            subs = suboutput.split('\n')
-            output.extend(subs[1:])
-        return '\n'.join(output)
-
-    def to_dot(self, f, skip_disconnected=True):
-        """Writes a DOT output for the graph to the provided file *f*.
-
-        If *skip_disconnected* is set to ``True``, then all distributions
-        that are not dependent on any other distribution are skipped.
-
-        :type f: a file-like object (must support ``write``)
-        :type skip_disconnected: ``bool``
-        """
-        disconnected = []
-
-        f.write("digraph dependencies {\n")
-        for dist, adjs in self.adjacency_list.items():
-            if len(adjs) == 0 and not skip_disconnected:
-                disconnected.append(dist)
-            for other, label in adjs:
-                if label is not None:
-                    f.write('"%s" -> "%s" [label="%s"]\n' %
-                            (dist.name, other.name, label))
-                else:
-                    f.write('"%s" -> "%s"\n' % (dist.name, other.name))
-        if not skip_disconnected and len(disconnected) > 0:
-            f.write('subgraph disconnected {\n')
-            f.write('label = "Disconnected"\n')
-            f.write('bgcolor = red\n')
-
-            for dist in disconnected:
-                f.write('"%s"' % dist.name)
-                f.write('\n')
-            f.write('}\n')
-        f.write('}\n')
-
-    def topological_sort(self):
-        """
-        Perform a topological sort of the graph.
-        :return: A tuple, the first element of which is a topologically sorted
-                 list of distributions, and the second element of which is a
-                 list of distributions that cannot be sorted because they have
-                 circular dependencies and so form a cycle.
-        """
-        result = []
-        # Make a shallow copy of the adjacency list
-        alist = {}
-        for k, v in self.adjacency_list.items():
-            alist[k] = v[:]
-        while True:
-            # See what we can remove in this run
-            to_remove = []
-            for k, v in list(alist.items()):
-                if not v:
-                    to_remove.append(k)
-                    del alist[k]
-            if not to_remove:
-                # What's left in alist (if anything) is a cycle.
-                break
-            # Remove from the adjacency list of others
-            for k, v in alist.items():
-                alist[k] = [(d, r) for d, r in v if d not in to_remove]
-            logger.debug('Moving to result: %s',
-                         ['%s (%s)' % (d.name, d.version) for d in to_remove])
-            result.extend(to_remove)
-        return result, list(alist.keys())
-
-    def __repr__(self):
-        """Representation of the graph"""
-        output = []
-        for dist, adjs in self.adjacency_list.items():
-            output.append(self.repr_node(dist))
-        return '\n'.join(output)
-
-
-def make_graph(dists, scheme='default'):
-    """Makes a dependency graph from the given distributions.
-
-    :parameter dists: a list of distributions
-    :type dists: list of :class:`distutils2.database.InstalledDistribution` and
-                 :class:`distutils2.database.EggInfoDistribution` instances
-    :rtype: a :class:`DependencyGraph` instance
-    """
-    scheme = get_scheme(scheme)
-    graph = DependencyGraph()
-    provided = {}  # maps names to lists of (version, dist) tuples
-
-    # first, build the graph and find out what's provided
-    for dist in dists:
-        graph.add_distribution(dist)
-
-        for p in dist.provides:
-            name, version = parse_name_and_version(p)
-            logger.debug('Add to provided: %s, %s, %s', name, version, dist)
-            provided.setdefault(name, []).append((version, dist))
-
-    # now make the edges
-    for dist in dists:
-        requires = (dist.run_requires | dist.meta_requires |
-                    dist.build_requires | dist.dev_requires)
-        for req in requires:
-            try:
-                matcher = scheme.matcher(req)
-            except UnsupportedVersionError:
-                # XXX compat-mode if cannot read the version
-                logger.warning('could not read version %r - using name only',
-                               req)
-                name = req.split()[0]
-                matcher = scheme.matcher(name)
-
-            name = matcher.key   # case-insensitive
-
-            matched = False
-            if name in provided:
-                for version, provider in provided[name]:
-                    try:
-                        match = matcher.match(version)
-                    except UnsupportedVersionError:
-                        match = False
-
-                    if match:
-                        graph.add_edge(dist, provider, req)
-                        matched = True
-                        break
-            if not matched:
-                graph.add_missing(dist, req)
-    return graph
-
-
-def get_dependent_dists(dists, dist):
-    """Recursively generate a list of distributions from *dists* that are
-    dependent on *dist*.
-
-    :param dists: a list of distributions
-    :param dist: a distribution, member of *dists* for which we are interested
-    """
-    if dist not in dists:
-        raise DistlibException('given distribution %r is not a member '
-                               'of the list' % dist.name)
-    graph = make_graph(dists)
-
-    dep = [dist]  # dependent distributions
-    todo = graph.reverse_list[dist]  # list of nodes we should inspect
-
-    while todo:
-        d = todo.pop()
-        dep.append(d)
-        for succ in graph.reverse_list[d]:
-            if succ not in dep:
-                todo.append(succ)
-
-    dep.pop(0)  # remove dist from dep, was there to prevent infinite loops
-    return dep
-
-
-def get_required_dists(dists, dist):
-    """Recursively generate a list of distributions from *dists* that are
-    required by *dist*.
-
-    :param dists: a list of distributions
-    :param dist: a distribution, member of *dists* for which we are interested
-    """
-    if dist not in dists:
-        raise DistlibException('given distribution %r is not a member '
-                               'of the list' % dist.name)
-    graph = make_graph(dists)
-
-    req = []  # required distributions
-    todo = graph.adjacency_list[dist]  # list of nodes we should inspect
-
-    while todo:
-        d = todo.pop()[0]
-        if d in req:
-            # already processed; guards against dependency cycles
-            continue
-        req.append(d)
-        for pred in graph.adjacency_list[d]:
-            # entries in the adjacency list are (distribution, label) tuples
-            if pred[0] not in req:
-                todo.append(pred)
-
-    return req
-
-
-def make_dist(name, version, **kwargs):
-    """
-    A convenience method for making a dist given just a name and version.
-    """
-    summary = kwargs.pop('summary', 'Placeholder for summary')
-    md = Metadata(**kwargs)
-    md.name = name
-    md.version = version
-    md.summary = summary or 'Placeholder for summary'
-    return Distribution(md)
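The file removed above is pip's vendored copy of distlib's database.py. As a
minimal sketch of how its dependency-graph helpers are typically driven
(assuming a regular, non-vendored distlib installation; this snippet is not
part of the deleted file itself):

    from distlib.database import DistributionPath, make_graph

    # Scan installed distributions, including legacy .egg-info/.egg ones.
    dists = list(DistributionPath(include_egg=True).get_distributions())
    graph = make_graph(dists)

    # Topologically sorted install order, plus anything stuck in a cycle.
    ordered, cyclic = graph.topological_sort()
    for dist in ordered:
        print(dist.name, dist.version)

    # Requirements not satisfied by any installed distribution.
    for dist, reqs in graph.missing.items():
        print('%s is missing: %s' % (dist.name, ', '.join(reqs)))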
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/index.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/index.py
deleted file mode 100644
index 7a87cdc..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/index.py
+++ /dev/null
@@ -1,516 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2013 Vinay Sajip.
-# Licensed to the Python Software Foundation under a contributor agreement.
-# See LICENSE.txt and CONTRIBUTORS.txt.
-#
-import hashlib
-import logging
-import os
-import shutil
-import subprocess
-import tempfile
-try:
-    from threading import Thread
-except ImportError:
-    from dummy_threading import Thread
-
-from . import DistlibException
-from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr,
-                     urlparse, build_opener, string_types)
-from .util import cached_property, zip_dir, ServerProxy
-
-logger = logging.getLogger(__name__)
-
-DEFAULT_INDEX = 'https://pypi.org/pypi'
-DEFAULT_REALM = 'pypi'
-
-class PackageIndex(object):
-    """
-    This class represents a package index compatible with PyPI, the Python
-    Package Index.
-    """
-
-    boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$'
-
-    def __init__(self, url=None):
-        """
-        Initialise an instance.
-
-        :param url: The URL of the index. If not specified, the URL for PyPI is
-                    used.
-        """
-        self.url = url or DEFAULT_INDEX
-        self.read_configuration()
-        scheme, netloc, path, params, query, frag = urlparse(self.url)
-        if params or query or frag or scheme not in ('http', 'https'):
-            raise DistlibException('invalid repository: %s' % self.url)
-        self.password_handler = None
-        self.ssl_verifier = None
-        self.gpg = None
-        self.gpg_home = None
-        with open(os.devnull, 'w') as sink:
-            # Use gpg by default rather than gpg2, as gpg2 insists on
-            # prompting for passwords
-            for s in ('gpg', 'gpg2'):
-                try:
-                    rc = subprocess.check_call([s, '--version'], stdout=sink,
-                                               stderr=sink)
-                    if rc == 0:
-                        self.gpg = s
-                        break
-                except OSError:
-                    pass
-
-    def _get_pypirc_command(self):
-        """
-        Get the distutils command for interacting with PyPI configurations.
-        :return: the command.
-        """
-        from distutils.core import Distribution
-        from distutils.config import PyPIRCCommand
-        d = Distribution()
-        return PyPIRCCommand(d)
-
-    def read_configuration(self):
-        """
-        Read the PyPI access configuration as supported by distutils, getting
-        PyPI to do the actual work. This populates ``username``, ``password``,
-        ``realm`` and ``url`` attributes from the configuration.
-        """
-        # get distutils to do the work
-        c = self._get_pypirc_command()
-        c.repository = self.url
-        cfg = c._read_pypirc()
-        self.username = cfg.get('username')
-        self.password = cfg.get('password')
-        self.realm = cfg.get('realm', 'pypi')
-        self.url = cfg.get('repository', self.url)
-
-    def save_configuration(self):
-        """
-        Save the PyPI access configuration. You must have set ``username`` and
-        ``password`` attributes before calling this method.
-
-        Again, distutils is used to do the actual work.
-        """
-        self.check_credentials()
-        # get distutils to do the work
-        c = self._get_pypirc_command()
-        c._store_pypirc(self.username, self.password)
-
-    def check_credentials(self):
-        """
-        Check that ``username`` and ``password`` have been set, and raise an
-        exception if not.
-        """
-        if self.username is None or self.password is None:
-            raise DistlibException('username and password must be set')
-        pm = HTTPPasswordMgr()
-        _, netloc, _, _, _, _ = urlparse(self.url)
-        pm.add_password(self.realm, netloc, self.username, self.password)
-        self.password_handler = HTTPBasicAuthHandler(pm)
-
-    def register(self, metadata):
-        """
-        Register a distribution on PyPI, using the provided metadata.
-
-        :param metadata: A :class:`Metadata` instance defining at least a name
-                         and version number for the distribution to be
-                         registered.
-        :return: The HTTP response received from PyPI upon submission of the
-                request.
-        """
-        self.check_credentials()
-        metadata.validate()
-        d = metadata.todict()
-        d[':action'] = 'verify'
-        request = self.encode_request(d.items(), [])
-        response = self.send_request(request)
-        d[':action'] = 'submit'
-        request = self.encode_request(d.items(), [])
-        return self.send_request(request)
-
-    def _reader(self, name, stream, outbuf):
-        """
-        Thread runner for reading lines of output from a subprocess into a
-        buffer.
-
-        :param name: The logical name of the stream (used for logging only).
-        :param stream: The stream to read from. This will typically be a pipe
-                       connected to the output stream of a subprocess.
-        :param outbuf: The list to append the read lines to.
-        """
-        while True:
-            s = stream.readline()
-            if not s:
-                break
-            s = s.decode('utf-8').rstrip()
-            outbuf.append(s)
-            logger.debug('%s: %s', name, s)
-        stream.close()
-
-    def get_sign_command(self, filename, signer, sign_password,
-                         keystore=None):
-        """
-        Return a suitable command for signing a file.
-
-        :param filename: The pathname to the file to be signed.
-        :param signer: The identifier of the signer of the file.
-        :param sign_password: The passphrase for the signer's
-                              private key used for signing.
-        :param keystore: The path to a directory which contains the keys
-                         used in verification. If not specified, the
-                         instance's ``gpg_home`` attribute is used instead.
-        :return: The signing command as a list suitable to be
-                 passed to :class:`subprocess.Popen`.
-        """
-        cmd = [self.gpg, '--status-fd', '2', '--no-tty']
-        if keystore is None:
-            keystore = self.gpg_home
-        if keystore:
-            cmd.extend(['--homedir', keystore])
-        if sign_password is not None:
-            cmd.extend(['--batch', '--passphrase-fd', '0'])
-        td = tempfile.mkdtemp()
-        sf = os.path.join(td, os.path.basename(filename) + '.asc')
-        cmd.extend(['--detach-sign', '--armor', '--local-user',
-                    signer, '--output', sf, filename])
-        logger.debug('invoking: %s', ' '.join(cmd))
-        return cmd, sf
-
-    def run_command(self, cmd, input_data=None):
-        """
-        Run a command in a child process, passing it any input data specified.
-
-        :param cmd: The command to run.
-        :param input_data: If specified, this must be a byte string containing
-                           data to be sent to the child process.
-        :return: A tuple consisting of the subprocess' exit code, a list of
-                 lines read from the subprocess' ``stdout``, and a list of
-                 lines read from the subprocess' ``stderr``.
-        """
-        kwargs = {
-            'stdout': subprocess.PIPE,
-            'stderr': subprocess.PIPE,
-        }
-        if input_data is not None:
-            kwargs['stdin'] = subprocess.PIPE
-        stdout = []
-        stderr = []
-        p = subprocess.Popen(cmd, **kwargs)
-        # We don't use communicate() here because we may need to
-        # get clever with interacting with the command
-        t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout))
-        t1.start()
-        t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr))
-        t2.start()
-        if input_data is not None:
-            p.stdin.write(input_data)
-            p.stdin.close()
-
-        p.wait()
-        t1.join()
-        t2.join()
-        return p.returncode, stdout, stderr
-
-    def sign_file(self, filename, signer, sign_password, keystore=None):
-        """
-        Sign a file.
-
-        :param filename: The pathname to the file to be signed.
-        :param signer: The identifier of the signer of the file.
-        :param sign_password: The passphrase for the signer's
-                              private key used for signing.
-        :param keystore: The path to a directory which contains the keys
-                         used in signing. If not specified, the instance's
-                         ``gpg_home`` attribute is used instead.
-        :return: The absolute pathname of the file where the signature is
-                 stored.
-        """
-        cmd, sig_file = self.get_sign_command(filename, signer, sign_password,
-                                              keystore)
-        rc, stdout, stderr = self.run_command(cmd,
-                                              sign_password.encode('utf-8'))
-        if rc != 0:
-            raise DistlibException('sign command failed with error '
-                                   'code %s' % rc)
-        return sig_file
-
-    def upload_file(self, metadata, filename, signer=None, sign_password=None,
-                    filetype='sdist', pyversion='source', keystore=None):
-        """
-        Upload a release file to the index.
-
-        :param metadata: A :class:`Metadata` instance defining at least a name
-                         and version number for the file to be uploaded.
-        :param filename: The pathname of the file to be uploaded.
-        :param signer: The identifier of the signer of the file.
-        :param sign_password: The passphrase for the signer's
-                              private key used for signing.
-        :param filetype: The type of the file being uploaded. This is the
-                        distutils command which produced that file, e.g.
-                        ``sdist`` or ``bdist_wheel``.
-        :param pyversion: The version of Python which the release relates
-                          to. For code compatible with any Python, this would
-                          be ``source``, otherwise it would be e.g. ``3.2``.
-        :param keystore: The path to a directory which contains the keys
-                         used in signing. If not specified, the instance's
-                         ``gpg_home`` attribute is used instead.
-        :return: The HTTP response received from PyPI upon submission of the
-                request.
-        """
-        self.check_credentials()
-        if not os.path.exists(filename):
-            raise DistlibException('not found: %s' % filename)
-        metadata.validate()
-        d = metadata.todict()
-        sig_file = None
-        if signer:
-            if not self.gpg:
-                logger.warning('no signing program available - not signed')
-            else:
-                sig_file = self.sign_file(filename, signer, sign_password,
-                                          keystore)
-        with open(filename, 'rb') as f:
-            file_data = f.read()
-        md5_digest = hashlib.md5(file_data).hexdigest()
-        sha256_digest = hashlib.sha256(file_data).hexdigest()
-        d.update({
-            ':action': 'file_upload',
-            'protocol_version': '1',
-            'filetype': filetype,
-            'pyversion': pyversion,
-            'md5_digest': md5_digest,
-            'sha256_digest': sha256_digest,
-        })
-        files = [('content', os.path.basename(filename), file_data)]
-        if sig_file:
-            with open(sig_file, 'rb') as f:
-                sig_data = f.read()
-            files.append(('gpg_signature', os.path.basename(sig_file),
-                         sig_data))
-            shutil.rmtree(os.path.dirname(sig_file))
-        request = self.encode_request(d.items(), files)
-        return self.send_request(request)
-
-    def upload_documentation(self, metadata, doc_dir):
-        """
-        Upload documentation to the index.
-
-        :param metadata: A :class:`Metadata` instance defining at least a name
-                         and version number for the documentation to be
-                         uploaded.
-        :param doc_dir: The pathname of the directory which contains the
-                        documentation. This should be the directory that
-                        contains the ``index.html`` for the documentation.
-        :return: The HTTP response received from PyPI upon submission of the
-                request.
-        """
-        self.check_credentials()
-        if not os.path.isdir(doc_dir):
-            raise DistlibException('not a directory: %r' % doc_dir)
-        fn = os.path.join(doc_dir, 'index.html')
-        if not os.path.exists(fn):
-            raise DistlibException('not found: %r' % fn)
-        metadata.validate()
-        name, version = metadata.name, metadata.version
-        zip_data = zip_dir(doc_dir).getvalue()
-        fields = [(':action', 'doc_upload'),
-                  ('name', name), ('version', version)]
-        files = [('content', name, zip_data)]
-        request = self.encode_request(fields, files)
-        return self.send_request(request)
-
-    def get_verify_command(self, signature_filename, data_filename,
-                           keystore=None):
-        """
-        Return a suitable command for verifying a file.
-
-        :param signature_filename: The pathname to the file containing the
-                                   signature.
-        :param data_filename: The pathname to the file containing the
-                              signed data.
-        :param keystore: The path to a directory which contains the keys
-                         used in verification. If not specified, the
-                         instance's ``gpg_home`` attribute is used instead.
-        :return: The verifying command as a list suitable to be
-                 passed to :class:`subprocess.Popen`.
-        """
-        cmd = [self.gpg, '--status-fd', '2', '--no-tty']
-        if keystore is None:
-            keystore = self.gpg_home
-        if keystore:
-            cmd.extend(['--homedir', keystore])
-        cmd.extend(['--verify', signature_filename, data_filename])
-        logger.debug('invoking: %s', ' '.join(cmd))
-        return cmd
-
-    def verify_signature(self, signature_filename, data_filename,
-                         keystore=None):
-        """
-        Verify a signature for a file.
-
-        :param signature_filename: The pathname to the file containing the
-                                   signature.
-        :param data_filename: The pathname to the file containing the
-                              signed data.
-        :param keystore: The path to a directory which contains the keys
-                         used in verification. If not specified, the
-                         instance's ``gpg_home`` attribute is used instead.
-        :return: True if the signature was verified, else False.
-        """
-        if not self.gpg:
-            raise DistlibException('verification unavailable because gpg '
-                                   'unavailable')
-        cmd = self.get_verify_command(signature_filename, data_filename,
-                                      keystore)
-        rc, stdout, stderr = self.run_command(cmd)
-        if rc not in (0, 1):
-            raise DistlibException('verify command failed with error '
-                             'code %s' % rc)
-        return rc == 0
-
-    def download_file(self, url, destfile, digest=None, reporthook=None):
-        """
-        This is a convenience method for downloading a file from a URL.
-        Normally, this will be a file from the index, though currently
-        no check is made for this (i.e. a file can be downloaded from
-        anywhere).
-
-        The method is just like the :func:`urlretrieve` function in the
-        standard library, except that it allows a digest to be computed
-        during the download and checked against an expected value.
-
-        :param url: The URL of the file to be downloaded (assumed to be
-                    available via an HTTP GET request).
-        :param destfile: The pathname where the downloaded file is to be
-                         saved.
-        :param digest: If specified, this must be a (hasher, value)
-                       tuple, where hasher is the algorithm used (e.g.
-                       ``'md5'``) and ``value`` is the expected value.
-        :param reporthook: The same as for :func:`urlretrieve` in the
-                           standard library.
-        """
-        if digest is None:
-            digester = None
-            logger.debug('No digest specified')
-        else:
-            if isinstance(digest, (list, tuple)):
-                hasher, digest = digest
-            else:
-                hasher = 'md5'
-            digester = getattr(hashlib, hasher)()
-            logger.debug('Digest specified: %s', digest)
-        # The following code is equivalent to urlretrieve.
-        # We need to do it this way so that we can compute the
-        # digest of the file as we go.
-        with open(destfile, 'wb') as dfp:
-            # addinfourl is not a context manager on 2.x
-            # so we have to use try/finally
-            sfp = self.send_request(Request(url))
-            try:
-                headers = sfp.info()
-                blocksize = 8192
-                size = -1
-                read = 0
-                blocknum = 0
-                if "content-length" in headers:
-                    size = int(headers["Content-Length"])
-                if reporthook:
-                    reporthook(blocknum, blocksize, size)
-                while True:
-                    block = sfp.read(blocksize)
-                    if not block:
-                        break
-                    read += len(block)
-                    dfp.write(block)
-                    if digester:
-                        digester.update(block)
-                    blocknum += 1
-                    if reporthook:
-                        reporthook(blocknum, blocksize, size)
-            finally:
-                sfp.close()
-
-        # check that we got the whole file, if we can
-        if size >= 0 and read < size:
-            raise DistlibException(
-                'retrieval incomplete: got only %d out of %d bytes'
-                % (read, size))
-        # if we have a digest, it must match.
-        if digester:
-            actual = digester.hexdigest()
-            if digest != actual:
-                raise DistlibException('%s digest mismatch for %s: expected '
-                                       '%s, got %s' % (hasher, destfile,
-                                                       digest, actual))
-            logger.debug('Digest verified: %s', digest)
-
-    def send_request(self, req):
-        """
-        Send a standard library :class:`Request` to PyPI and return its
-        response.
-
-        :param req: The request to send.
-        :return: The HTTP response from PyPI (a standard library HTTPResponse).
-        """
-        handlers = []
-        if self.password_handler:
-            handlers.append(self.password_handler)
-        if self.ssl_verifier:
-            handlers.append(self.ssl_verifier)
-        opener = build_opener(*handlers)
-        return opener.open(req)
-
-    def encode_request(self, fields, files):
-        """
-        Encode fields and files for posting to an HTTP server.
-
-        :param fields: The fields to send as a list of (fieldname, value)
-                       tuples.
-        :param files: The files to send as a list of (fieldname, filename,
-                      file_bytes) tuples.
-        """
-        # Adapted from packaging, which in turn was adapted from
-        # http://code.activestate.com/recipes/146306
-
-        parts = []
-        boundary = self.boundary
-        for k, values in fields:
-            if not isinstance(values, (list, tuple)):
-                values = [values]
-
-            for v in values:
-                parts.extend((
-                    b'--' + boundary,
-                    ('Content-Disposition: form-data; name="%s"' %
-                     k).encode('utf-8'),
-                    b'',
-                    v.encode('utf-8')))
-        for key, filename, value in files:
-            parts.extend((
-                b'--' + boundary,
-                ('Content-Disposition: form-data; name="%s"; filename="%s"' %
-                 (key, filename)).encode('utf-8'),
-                b'',
-                value))
-
-        parts.extend((b'--' + boundary + b'--', b''))
-
-        body = b'\r\n'.join(parts)
-        ct = b'multipart/form-data; boundary=' + boundary
-        headers = {
-            'Content-type': ct,
-            'Content-length': str(len(body))
-        }
-        return Request(self.url, body, headers)
-
-    def search(self, terms, operator=None):
-        if isinstance(terms, string_types):
-            terms = {'name': terms}
-        rpc_proxy = ServerProxy(self.url, timeout=3.0)
-        try:
-            return rpc_proxy.search(terms, operator or 'and')
-        finally:
-            rpc_proxy('close')()
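The index.py removed above provides the PackageIndex client. A minimal sketch
of its download-and-verify flow (again assuming a regular distlib
installation; the URL and digest below are placeholder values, not real ones):

    from distlib.index import PackageIndex

    index = PackageIndex()   # defaults to https://pypi.org/pypi
    index.download_file(
        'https://example.com/packages/demo-1.0.tar.gz',   # placeholder URL
        'demo-1.0.tar.gz',
        # (hasher, expected hex value); a mismatch raises DistlibException
        digest=('sha256', '0000...placeholder...0000'),
    )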
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/locators.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/locators.py
deleted file mode 100644
index a7ed946..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/locators.py
+++ /dev/null
@@ -1,1295 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2012-2015 Vinay Sajip.
-# Licensed to the Python Software Foundation under a contributor agreement.
-# See LICENSE.txt and CONTRIBUTORS.txt.
-#
-
-import gzip
-from io import BytesIO
-import json
-import logging
-import os
-import posixpath
-import re
-try:
-    import threading
-except ImportError:  # pragma: no cover
-    import dummy_threading as threading
-import zlib
-
-from . import DistlibException
-from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url,
-                     queue, quote, unescape, string_types, build_opener,
-                     HTTPRedirectHandler as BaseRedirectHandler, text_type,
-                     Request, HTTPError, URLError)
-from .database import Distribution, DistributionPath, make_dist
-from .metadata import Metadata, MetadataInvalidError
-from .util import (cached_property, parse_credentials, ensure_slash,
-                   split_filename, get_project_data, parse_requirement,
-                   parse_name_and_version, ServerProxy, normalize_name)
-from .version import get_scheme, UnsupportedVersionError
-from .wheel import Wheel, is_compatible
-
-logger = logging.getLogger(__name__)
-
-HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)')
-CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I)
-HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml')
-DEFAULT_INDEX = 'https://pypi.org/pypi'
-
-def get_all_distribution_names(url=None):
-    """
-    Return all distribution names known by an index.
-    :param url: The URL of the index.
-    :return: A list of all known distribution names.
-    """
-    if url is None:
-        url = DEFAULT_INDEX
-    client = ServerProxy(url, timeout=3.0)
-    try:
-        return client.list_packages()
-    finally:
-        client('close')()
-
-class RedirectHandler(BaseRedirectHandler):
-    """
-    A class to work around a bug in some Python 3.2.x releases.
-    """
-    # There's a bug in the base version for some 3.2.x
-    # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header
-    # returns e.g. /abc, it bails because it says the scheme ''
-    # is bogus, when actually it should use the request's
-    # URL for the scheme. See Python issue #13696.
-    def http_error_302(self, req, fp, code, msg, headers):
-        # Some servers (incorrectly) return multiple Location headers
-        # (so probably same goes for URI).  Use first header.
-        newurl = None
-        for key in ('location', 'uri'):
-            if key in headers:
-                newurl = headers[key]
-                break
-        if newurl is None:  # pragma: no cover
-            return
-        urlparts = urlparse(newurl)
-        if urlparts.scheme == '':
-            newurl = urljoin(req.get_full_url(), newurl)
-            if hasattr(headers, 'replace_header'):
-                headers.replace_header(key, newurl)
-            else:
-                headers[key] = newurl
-        return BaseRedirectHandler.http_error_302(self, req, fp, code, msg,
-                                                  headers)
-
-    http_error_301 = http_error_303 = http_error_307 = http_error_302
-
-class Locator(object):
-    """
-    A base class for locators - things that locate distributions.
-    """
-    source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz')
-    binary_extensions = ('.egg', '.exe', '.whl')
-    excluded_extensions = ('.pdf',)
-
-    # A list of tags indicating which wheels you want to match. The default
-    # value of None matches against the tags compatible with the running
-    # Python. If you want to match other values, set wheel_tags on a locator
-    # instance to a list of tuples (pyver, abi, arch) which you want to match.
-    wheel_tags = None
-
-    downloadable_extensions = source_extensions + ('.whl',)
-
-    def __init__(self, scheme='default'):
-        """
-        Initialise an instance.
-        :param scheme: Because locators look for most recent versions, they
-                       need to know the version scheme to use. This specifies
-                       the current PEP-recommended scheme - use ``'legacy'``
-                       if you need to support existing distributions on PyPI.
-        """
-        self._cache = {}
-        self.scheme = scheme
-        # Because of bugs in some of the handlers on some of the platforms,
-        # we use our own opener rather than just using urlopen.
-        self.opener = build_opener(RedirectHandler())
-        # If get_project() is called from locate(), the matcher instance
-        # is set from the requirement passed to locate(). See issue #18 for
-        # why this can be useful to know.
-        self.matcher = None
-        self.errors = queue.Queue()
-
-    def get_errors(self):
-        """
-        Return any errors which have occurred.
-        """
-        result = []
-        while not self.errors.empty():  # pragma: no cover
-            try:
-                e = self.errors.get(False)
-                result.append(e)
-            except queue.Empty:  # Queue instances have no 'Empty' attribute
-                continue
-            self.errors.task_done()
-        return result
-
-    def clear_errors(self):
-        """
-        Clear any errors which may have been logged.
-        """
-        # Just get the errors and throw them away
-        self.get_errors()
-
-    def clear_cache(self):
-        self._cache.clear()
-
-    def _get_scheme(self):
-        return self._scheme
-
-    def _set_scheme(self, value):
-        self._scheme = value
-
-    scheme = property(_get_scheme, _set_scheme)
-
-    def _get_project(self, name):
-        """
-        For a given project, get a dictionary mapping available versions to Distribution
-        instances.
-
-        This should be implemented in subclasses.
-
-        If called from a locate() request, self.matcher will be set to a
-        matcher for the requirement to satisfy, otherwise it will be None.
-        """
-        raise NotImplementedError('Please implement in the subclass')
-
-    def get_distribution_names(self):
-        """
-        Return all the distribution names known to this locator.
-        """
-        raise NotImplementedError('Please implement in the subclass')
-
-    def get_project(self, name):
-        """
-        For a given project, get a dictionary mapping available versions to Distribution
-        instances.
-
-        This calls _get_project to do all the work, and just implements a caching layer on top.
-        """
-        if self._cache is None:  # pragma: no cover
-            result = self._get_project(name)
-        elif name in self._cache:
-            result = self._cache[name]
-        else:
-            self.clear_errors()
-            result = self._get_project(name)
-            self._cache[name] = result
-        return result
-
-    def score_url(self, url):
-        """
-        Give an url a score which can be used to choose preferred URLs
-        for a given project release.
-        """
-        t = urlparse(url)
-        basename = posixpath.basename(t.path)
-        compatible = True
-        is_wheel = basename.endswith('.whl')
-        is_downloadable = basename.endswith(self.downloadable_extensions)
-        if is_wheel:
-            compatible = is_compatible(Wheel(basename), self.wheel_tags)
-        return (t.scheme == 'https', 'pypi.org' in t.netloc,
-                is_downloadable, is_wheel, compatible, basename)
-
-    def prefer_url(self, url1, url2):
-        """
-        Choose one of two URLs where both are candidates for distribution
-        archives for the same version of a distribution (for example,
-        .tar.gz vs. zip).
-
-        The current implementation favours https:// URLs over http://, archives
-        from PyPI over those from other locations, wheel compatibility (if a
-        wheel) and then the archive name.
-        """
-        result = url2
-        if url1:
-            s1 = self.score_url(url1)
-            s2 = self.score_url(url2)
-            if s1 > s2:
-                result = url1
-            if result != url2:
-                logger.debug('Not replacing %r with %r', url1, url2)
-            else:
-                logger.debug('Replacing %r with %r', url1, url2)
-        return result
-
-    def split_filename(self, filename, project_name):
-        """
-        Attempt to split a filename into project name, version and Python
-        version.
-        """
-        return split_filename(filename, project_name)
-
-    def convert_url_to_download_info(self, url, project_name):
-        """
-        See if a URL is a candidate for a download URL for a project (the URL
-        has typically been scraped from an HTML page).
-
-        If it is, a dictionary is returned with keys "name", "version",
-        "filename" and "url"; otherwise, None is returned.
-        """
-        def same_project(name1, name2):
-            return normalize_name(name1) == normalize_name(name2)
-
-        result = None
-        scheme, netloc, path, params, query, frag = urlparse(url)
-        if frag.lower().startswith('egg='):  # pragma: no cover
-            logger.debug('%s: version hint in fragment: %r',
-                         project_name, frag)
-        m = HASHER_HASH.match(frag)
-        if m:
-            algo, digest = m.groups()
-        else:
-            algo, digest = None, None
-        origpath = path
-        if path and path[-1] == '/':  # pragma: no cover
-            path = path[:-1]
-        if path.endswith('.whl'):
-            try:
-                wheel = Wheel(path)
-                if not is_compatible(wheel, self.wheel_tags):
-                    logger.debug('Wheel not compatible: %s', path)
-                else:
-                    if project_name is None:
-                        include = True
-                    else:
-                        include = same_project(wheel.name, project_name)
-                    if include:
-                        result = {
-                            'name': wheel.name,
-                            'version': wheel.version,
-                            'filename': wheel.filename,
-                            'url': urlunparse((scheme, netloc, origpath,
-                                               params, query, '')),
-                            'python-version': ', '.join(
-                                ['.'.join(list(v[2:])) for v in wheel.pyver]),
-                        }
-            except Exception as e:  # pragma: no cover
-                logger.warning('invalid path for wheel: %s', path)
-        elif not path.endswith(self.downloadable_extensions):  # pragma: no cover
-            logger.debug('Not downloadable: %s', path)
-        else:  # downloadable extension
-            path = filename = posixpath.basename(path)
-            for ext in self.downloadable_extensions:
-                if path.endswith(ext):
-                    path = path[:-len(ext)]
-                    t = self.split_filename(path, project_name)
-                    if not t:  # pragma: no cover
-                        logger.debug('No match for project/version: %s', path)
-                    else:
-                        name, version, pyver = t
-                        if not project_name or same_project(project_name, name):
-                            result = {
-                                'name': name,
-                                'version': version,
-                                'filename': filename,
-                                'url': urlunparse((scheme, netloc, origpath,
-                                                   params, query, '')),
-                                #'packagetype': 'sdist',
-                            }
-                            if pyver:  # pragma: no cover
-                                result['python-version'] = pyver
-                    break
-        if result and algo:
-            result['%s_digest' % algo] = digest
-        return result
-
-    def _get_digest(self, info):
-        """
-        Get a digest from a dictionary by looking at keys of the form
-        'algo_digest'.
-
-        Returns a 2-tuple (algo, digest) if found, else None. Currently
-        looks only for SHA256, then MD5.
-        """
-        result = None
-        for algo in ('sha256', 'md5'):
-            key = '%s_digest' % algo
-            if key in info:
-                result = (algo, info[key])
-                break
-        return result
-
-    def _update_version_data(self, result, info):
-        """
-        Update a result dictionary (the final result from _get_project) with a
-        dictionary for a specific version, which typically holds information
-        gleaned from a filename or URL for an archive for the distribution.
-        """
-        name = info.pop('name')
-        version = info.pop('version')
-        if version in result:
-            dist = result[version]
-            md = dist.metadata
-        else:
-            dist = make_dist(name, version, scheme=self.scheme)
-            md = dist.metadata
-        dist.digest = digest = self._get_digest(info)
-        url = info['url']
-        result['digests'][url] = digest
-        if md.source_url != info['url']:
-            md.source_url = self.prefer_url(md.source_url, url)
-            result['urls'].setdefault(version, set()).add(url)
-        dist.locator = self
-        result[version] = dist
-
-    def locate(self, requirement, prereleases=False):
-        """
-        Find the most recent distribution which matches the given
-        requirement.
-
-        :param requirement: A requirement of the form 'foo (1.0)' or perhaps
-                            'foo (>= 1.0, < 2.0, != 1.3)'
-        :param prereleases: If ``True``, allow pre-release versions
-                            to be located. Otherwise, pre-release versions
-                            are not returned.
-        :return: A :class:`Distribution` instance, or ``None`` if no such
-                 distribution could be located.
-        """
-        result = None
-        r = parse_requirement(requirement)
-        if r is None:  # pragma: no cover
-            raise DistlibException('Not a valid requirement: %r' % requirement)
-        scheme = get_scheme(self.scheme)
-        self.matcher = matcher = scheme.matcher(r.requirement)
-        logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__)
-        versions = self.get_project(r.name)
-        if len(versions) > 2:   # urls and digests keys are present
-            # sometimes, versions are invalid
-            slist = []
-            vcls = matcher.version_class
-            for k in versions:
-                if k in ('urls', 'digests'):
-                    continue
-                try:
-                    if not matcher.match(k):
-                        logger.debug('%s did not match %r', matcher, k)
-                    else:
-                        if prereleases or not vcls(k).is_prerelease:
-                            slist.append(k)
-                        else:
-                            logger.debug('skipping pre-release '
-                                         'version %s of %s', k, matcher.name)
-                except Exception:  # pragma: no cover
-                    logger.warning('error matching %s with %r', matcher, k)
-                    pass # slist.append(k)
-            if len(slist) > 1:
-                slist = sorted(slist, key=scheme.key)
-            if slist:
-                logger.debug('sorted list: %s', slist)
-                version = slist[-1]
-                result = versions[version]
-        if result:
-            if r.extras:
-                result.extras = r.extras
-            result.download_urls = versions.get('urls', {}).get(version, set())
-            d = {}
-            sd = versions.get('digests', {})
-            for url in result.download_urls:
-                if url in sd:  # pragma: no cover
-                    d[url] = sd[url]
-            result.digests = d
-        self.matcher = None
-        return result
-
-
-class PyPIRPCLocator(Locator):
-    """
-    This locator uses XML-RPC to locate distributions. It therefore
-    cannot be used with simple mirrors (that only mirror file content).
-    """
-    def __init__(self, url, **kwargs):
-        """
-        Initialise an instance.
-
-        :param url: The URL to use for XML-RPC.
-        :param kwargs: Passed to the superclass constructor.
-        """
-        super(PyPIRPCLocator, self).__init__(**kwargs)
-        self.base_url = url
-        self.client = ServerProxy(url, timeout=3.0)
-
-    def get_distribution_names(self):
-        """
-        Return all the distribution names known to this locator.
-        """
-        return set(self.client.list_packages())
-
-    def _get_project(self, name):
-        result = {'urls': {}, 'digests': {}}
-        versions = self.client.package_releases(name, True)
-        for v in versions:
-            urls = self.client.release_urls(name, v)
-            data = self.client.release_data(name, v)
-            metadata = Metadata(scheme=self.scheme)
-            metadata.name = data['name']
-            metadata.version = data['version']
-            metadata.license = data.get('license')
-            metadata.keywords = data.get('keywords', [])
-            metadata.summary = data.get('summary')
-            dist = Distribution(metadata)
-            if urls:
-                info = urls[0]
-                metadata.source_url = info['url']
-                dist.digest = self._get_digest(info)
-                dist.locator = self
-                result[v] = dist
-                for info in urls:
-                    url = info['url']
-                    digest = self._get_digest(info)
-                    result['urls'].setdefault(v, set()).add(url)
-                    result['digests'][url] = digest
-        return result
-
-class PyPIJSONLocator(Locator):
-    """
-    This locator uses PyPI's JSON interface. It's very limited in functionality
-    and probably not worth using.
-    """
-    def __init__(self, url, **kwargs):
-        super(PyPIJSONLocator, self).__init__(**kwargs)
-        self.base_url = ensure_slash(url)
-
-    def get_distribution_names(self):
-        """
-        Return all the distribution names known to this locator.
-        """
-        raise NotImplementedError('Not available from this locator')
-
-    def _get_project(self, name):
-        result = {'urls': {}, 'digests': {}}
-        url = urljoin(self.base_url, '%s/json' % quote(name))
-        try:
-            resp = self.opener.open(url)
-            data = resp.read().decode() # for now
-            d = json.loads(data)
-            md = Metadata(scheme=self.scheme)
-            data = d['info']
-            md.name = data['name']
-            md.version = data['version']
-            md.license = data.get('license')
-            md.keywords = data.get('keywords', [])
-            md.summary = data.get('summary')
-            dist = Distribution(md)
-            dist.locator = self
-            urls = d['urls']
-            result[md.version] = dist
-            for info in d['urls']:
-                url = info['url']
-                dist.download_urls.add(url)
-                dist.digests[url] = self._get_digest(info)
-                result['urls'].setdefault(md.version, set()).add(url)
-                result['digests'][url] = self._get_digest(info)
-            # Now get other releases
-            for version, infos in d['releases'].items():
-                if version == md.version:
-                    continue    # already done
-                omd = Metadata(scheme=self.scheme)
-                omd.name = md.name
-                omd.version = version
-                odist = Distribution(omd)
-                odist.locator = self
-                result[version] = odist
-                for info in infos:
-                    url = info['url']
-                    odist.download_urls.add(url)
-                    odist.digests[url] = self._get_digest(info)
-                    result['urls'].setdefault(version, set()).add(url)
-                    result['digests'][url] = self._get_digest(info)
-#            for info in urls:
-#                md.source_url = info['url']
-#                dist.digest = self._get_digest(info)
-#                dist.locator = self
-#                for info in urls:
-#                    url = info['url']
-#                    result['urls'].setdefault(md.version, set()).add(url)
-#                    result['digests'][url] = self._get_digest(info)
-        except Exception as e:
-            self.errors.put(text_type(e))
-            logger.exception('JSON fetch failed: %s', e)
-        return result
-
-
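Hedged usage sketch (not part of the deleted file): the JSON endpoint consulted above has the form https://pypi.org/pypi/<project>/json, and the standalone distlib package exposes the same class as this vendored copy; network access is assumed.

from distlib.locators import PyPIJSONLocator  # standalone distlib mirrors the vendored module

locator = PyPIJSONLocator('https://pypi.org/pypi/')
project = locator.get_project('requests')      # version strings plus 'urls'/'digests' keys
versions = [k for k in project if k not in ('urls', 'digests')]
print(len(versions), 'releases found')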
-class Page(object):
-    """
-    This class represents a scraped HTML page.
-    """
-    # The following slightly hairy-looking regex just looks for the contents of
-    # an anchor link, which has an attribute "href" either immediately preceded
-    # or immediately followed by a "rel" attribute. The attribute values can be
-    # declared with double quotes, single quotes or no quotes - which leads to
-    # the length of the expression.
-    _href = re.compile("""
-(rel\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*))\\s+)?
-href\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*))
-(\\s+rel\\s*=\\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\\s\n]*)))?
-""", re.I | re.S | re.X)
-    _base = re.compile(r"""]+)""", re.I | re.S)
-
-    def __init__(self, data, url):
-        """
-        Initialise an instance with the Unicode page contents and the URL they
-        came from.
-        """
-        self.data = data
-        self.base_url = self.url = url
-        m = self._base.search(self.data)
-        if m:
-            self.base_url = m.group(1)
-
-    _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
-
-    @cached_property
-    def links(self):
-        """
-        Return the URLs of all the links on a page together with information
-        about their "rel" attribute, for determining which ones to treat as
-        downloads and which ones to queue for further scraping.
-        """
-        def clean(url):
-            "Tidy up an URL."
-            scheme, netloc, path, params, query, frag = urlparse(url)
-            return urlunparse((scheme, netloc, quote(path),
-                               params, query, frag))
-
-        result = set()
-        for match in self._href.finditer(self.data):
-            d = match.groupdict('')
-            rel = (d['rel1'] or d['rel2'] or d['rel3'] or
-                   d['rel4'] or d['rel5'] or d['rel6'])
-            url = d['url1'] or d['url2'] or d['url3']
-            url = urljoin(self.base_url, url)
-            url = unescape(url)
-            url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url)
-            result.add((url, rel))
-        # We sort the result, hoping to bring the most recent versions
-        # to the front
-        result = sorted(result, key=lambda t: t[0], reverse=True)
-        return result
-
-
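A minimal sketch of how the class above is used, assuming the standalone distlib package (whose locators.Page mirrors this vendored code): feed it page HTML and the URL it came from, then read the (url, rel) pairs from links.

from distlib.locators import Page  # standalone distlib mirrors the vendored class

html = ('<a href="https://example.com/dl/foo-1.0.tar.gz" rel="download">'
        'foo-1.0.tar.gz</a>')
page = Page(html, 'https://example.com/simple/foo/')
for url, rel in page.links:
    print(url, rel)   # absolute download URL together with rel="download"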
-class SimpleScrapingLocator(Locator):
-    """
-    A locator which scrapes HTML pages to locate downloads for a distribution.
-    This runs multiple threads to do the I/O; performance is at least as good
-    as pip's PackageFinder, which works in an analogous fashion.
-    """
-
-    # These are used to deal with various Content-Encoding schemes.
-    decoders = {
-        'deflate': zlib.decompress,
-        'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(b)).read(),
-        'none': lambda b: b,
-    }
-
-    def __init__(self, url, timeout=None, num_workers=10, **kwargs):
-        """
-        Initialise an instance.
-        :param url: The root URL to use for scraping.
-        :param timeout: The timeout, in seconds, to be applied to requests.
-                        This defaults to ``None`` (no timeout specified).
-        :param num_workers: The number of worker threads used to do I/O.
-                            This defaults to 10.
-        :param kwargs: Passed to the superclass.
-        """
-        super(SimpleScrapingLocator, self).__init__(**kwargs)
-        self.base_url = ensure_slash(url)
-        self.timeout = timeout
-        self._page_cache = {}
-        self._seen = set()
-        self._to_fetch = queue.Queue()
-        self._bad_hosts = set()
-        self.skip_externals = False
-        self.num_workers = num_workers
-        self._lock = threading.RLock()
-        # See issue #45: we need to be resilient when the locator is used
-        # in a thread, e.g. with concurrent.futures. We can't use self._lock
-        # as it is for coordinating our internal threads - the ones created
-        # in _prepare_threads.
-        self._gplock = threading.RLock()
-        self.platform_check = False  # See issue #112
-
-    def _prepare_threads(self):
-        """
-        Threads are created only when get_project is called, and terminate
-        before it returns. They are there primarily to parallelise I/O (i.e.
-        fetching web pages).
-        """
-        self._threads = []
-        for i in range(self.num_workers):
-            t = threading.Thread(target=self._fetch)
-            t.setDaemon(True)
-            t.start()
-            self._threads.append(t)
-
-    def _wait_threads(self):
-        """
-        Tell all the threads to terminate (by sending a sentinel value) and
-        wait for them to do so.
-        """
-        # Note that you need two loops, since you can't say which
-        # thread will get each sentinel
-        for t in self._threads:
-            self._to_fetch.put(None)    # sentinel
-        for t in self._threads:
-            t.join()
-        self._threads = []
-
-    def _get_project(self, name):
-        result = {'urls': {}, 'digests': {}}
-        with self._gplock:
-            self.result = result
-            self.project_name = name
-            url = urljoin(self.base_url, '%s/' % quote(name))
-            self._seen.clear()
-            self._page_cache.clear()
-            self._prepare_threads()
-            try:
-                logger.debug('Queueing %s', url)
-                self._to_fetch.put(url)
-                self._to_fetch.join()
-            finally:
-                self._wait_threads()
-            del self.result
-        return result
-
-    platform_dependent = re.compile(r'\b(linux_(i\d86|x86_64|arm\w+)|'
-                                    r'win(32|_amd64)|macosx_?\d+)\b', re.I)
-
-    def _is_platform_dependent(self, url):
-        """
-        Does an URL refer to a platform-specific download?
-        """
-        return self.platform_dependent.search(url)
-
-    def _process_download(self, url):
-        """
-        See if an URL is a suitable download for a project.
-
-        If it is, register information in the result dictionary (for
-        _get_project) about the specific version it's for.
-
-        Note that the return value isn't actually used other than as a boolean
-        value.
-        """
-        if self.platform_check and self._is_platform_dependent(url):
-            info = None
-        else:
-            info = self.convert_url_to_download_info(url, self.project_name)
-        logger.debug('process_download: %s -> %s', url, info)
-        if info:
-            with self._lock:    # needed because self.result is shared
-                self._update_version_data(self.result, info)
-        return info
-
-    def _should_queue(self, link, referrer, rel):
-        """
-        Determine whether a link URL from a referring page and with a
-        particular "rel" attribute should be queued for scraping.
-        """
-        scheme, netloc, path, _, _, _ = urlparse(link)
-        if path.endswith(self.source_extensions + self.binary_extensions +
-                         self.excluded_extensions):
-            result = False
-        elif self.skip_externals and not link.startswith(self.base_url):
-            result = False
-        elif not referrer.startswith(self.base_url):
-            result = False
-        elif rel not in ('homepage', 'download'):
-            result = False
-        elif scheme not in ('http', 'https', 'ftp'):
-            result = False
-        elif self._is_platform_dependent(link):
-            result = False
-        else:
-            host = netloc.split(':', 1)[0]
-            if host.lower() == 'localhost':
-                result = False
-            else:
-                result = True
-        logger.debug('should_queue: %s (%s) from %s -> %s', link, rel,
-                     referrer, result)
-        return result
-
-    def _fetch(self):
-        """
-        Get a URL to fetch from the work queue, get the HTML page, examine its
-        links for download candidates and candidates for further scraping.
-
-        This is a handy method to run in a thread.
-        """
-        while True:
-            url = self._to_fetch.get()
-            try:
-                if url:
-                    page = self.get_page(url)
-                    if page is None:    # e.g. after an error
-                        continue
-                    for link, rel in page.links:
-                        if link not in self._seen:
-                            try:
-                                self._seen.add(link)
-                                if (not self._process_download(link) and
-                                    self._should_queue(link, url, rel)):
-                                    logger.debug('Queueing %s from %s', link, url)
-                                    self._to_fetch.put(link)
-                            except MetadataInvalidError:  # e.g. invalid versions
-                                pass
-            except Exception as e:  # pragma: no cover
-                self.errors.put(text_type(e))
-            finally:
-                # always do this, to avoid hangs :-)
-                self._to_fetch.task_done()
-            if not url:
-                #logger.debug('Sentinel seen, quitting.')
-                break
-
-    def get_page(self, url):
-        """
-        Get the HTML for an URL, possibly from an in-memory cache.
-
-        XXX TODO Note: this cache is never actually cleared. It's assumed that
-        the data won't get stale over the lifetime of a locator instance (not
-        necessarily true for the default_locator).
-        """
-        # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api
-        scheme, netloc, path, _, _, _ = urlparse(url)
-        if scheme == 'file' and os.path.isdir(url2pathname(path)):
-            url = urljoin(ensure_slash(url), 'index.html')
-
-        if url in self._page_cache:
-            result = self._page_cache[url]
-            logger.debug('Returning %s from cache: %s', url, result)
-        else:
-            host = netloc.split(':', 1)[0]
-            result = None
-            if host in self._bad_hosts:
-                logger.debug('Skipping %s due to bad host %s', url, host)
-            else:
-                req = Request(url, headers={'Accept-encoding': 'identity'})
-                try:
-                    logger.debug('Fetching %s', url)
-                    resp = self.opener.open(req, timeout=self.timeout)
-                    logger.debug('Fetched %s', url)
-                    headers = resp.info()
-                    content_type = headers.get('Content-Type', '')
-                    if HTML_CONTENT_TYPE.match(content_type):
-                        final_url = resp.geturl()
-                        data = resp.read()
-                        encoding = headers.get('Content-Encoding')
-                        if encoding:
-                            decoder = self.decoders[encoding]   # fail if not found
-                            data = decoder(data)
-                        encoding = 'utf-8'
-                        m = CHARSET.search(content_type)
-                        if m:
-                            encoding = m.group(1)
-                        try:
-                            data = data.decode(encoding)
-                        except UnicodeError:  # pragma: no cover
-                            data = data.decode('latin-1')    # fallback
-                        result = Page(data, final_url)
-                        self._page_cache[final_url] = result
-                except HTTPError as e:
-                    if e.code != 404:
-                        logger.exception('Fetch failed: %s: %s', url, e)
-                except URLError as e:  # pragma: no cover
-                    logger.exception('Fetch failed: %s: %s', url, e)
-                    with self._lock:
-                        self._bad_hosts.add(host)
-                except Exception as e:  # pragma: no cover
-                    logger.exception('Fetch failed: %s: %s', url, e)
-                finally:
-                    self._page_cache[url] = result   # even if None (failure)
-        return result
-
-    _distname_re = re.compile('<a href=[^>]*>([^<]+)<')
-
-    def get_distribution_names(self):
-        """
-        Return all the distribution names known to this locator.
-        """
-        result = set()
-        page = self.get_page(self.base_url)
-        if not page:
-            raise DistlibException('Unable to get %s' % self.base_url)
-        for match in self._distname_re.finditer(page.data):
-            result.add(match.group(1))
-        return result
-
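A usage sketch, assuming the standalone distlib package and network access: this is the scraping locator that default_locator (defined further down in this file) points at the PyPI simple index.

from distlib.locators import SimpleScrapingLocator

locator = SimpleScrapingLocator('https://pypi.org/simple/', timeout=3.0)
project = locator.get_project('pip')   # {version: Distribution, 'urls': ..., 'digests': ...}
print(sorted(k for k in project if k not in ('urls', 'digests'))[:5])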
-class DirectoryLocator(Locator):
-    """
-    This class locates distributions in a directory tree.
-    """
-
-    def __init__(self, path, **kwargs):
-        """
-        Initialise an instance.
-        :param path: The root of the directory tree to search.
-        :param kwargs: Passed to the superclass constructor,
-                       except for:
-                       * recursive - if True (the default), subdirectories are
-                         recursed into. If False, only the top-level directory
-                         is searched,
-        """
-        self.recursive = kwargs.pop('recursive', True)
-        super(DirectoryLocator, self).__init__(**kwargs)
-        path = os.path.abspath(path)
-        if not os.path.isdir(path):  # pragma: no cover
-            raise DistlibException('Not a directory: %r' % path)
-        self.base_dir = path
-
-    def should_include(self, filename, parent):
-        """
-        Should a filename be considered as a candidate for a distribution
-        archive? As well as the filename, the directory which contains it
-        is provided, though not used by the current implementation.
-        """
-        return filename.endswith(self.downloadable_extensions)
-
-    def _get_project(self, name):
-        result = {'urls': {}, 'digests': {}}
-        for root, dirs, files in os.walk(self.base_dir):
-            for fn in files:
-                if self.should_include(fn, root):
-                    fn = os.path.join(root, fn)
-                    url = urlunparse(('file', '',
-                                      pathname2url(os.path.abspath(fn)),
-                                      '', '', ''))
-                    info = self.convert_url_to_download_info(url, name)
-                    if info:
-                        self._update_version_data(result, info)
-            if not self.recursive:
-                break
-        return result
-
-    def get_distribution_names(self):
-        """
-        Return all the distribution names known to this locator.
-        """
-        result = set()
-        for root, dirs, files in os.walk(self.base_dir):
-            for fn in files:
-                if self.should_include(fn, root):
-                    fn = os.path.join(root, fn)
-                    url = urlunparse(('file', '',
-                                      pathname2url(os.path.abspath(fn)),
-                                      '', '', ''))
-                    info = self.convert_url_to_download_info(url, None)
-                    if info:
-                        result.add(info['name'])
-            if not self.recursive:
-                break
-        return result
-
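A sketch of the directory-based locator above, assuming a local ./dist directory of sdists or wheels; 'foo' is a placeholder project name.

from distlib.locators import DirectoryLocator

locator = DirectoryLocator('dist', recursive=False)   # search only the top-level directory
print(locator.get_distribution_names())
print(locator.get_project('foo'))                     # 'foo' is a placeholder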
-class JSONLocator(Locator):
-    """
-    This locator uses special extended metadata (not available on PyPI) and is
-    the basis of performant dependency resolution in distlib. Other locators
-    require archive downloads before dependencies can be determined! As you
-    might imagine, that can be slow.
-    """
-    def get_distribution_names(self):
-        """
-        Return all the distribution names known to this locator.
-        """
-        raise NotImplementedError('Not available from this locator')
-
-    def _get_project(self, name):
-        result = {'urls': {}, 'digests': {}}
-        data = get_project_data(name)
-        if data:
-            for info in data.get('files', []):
-                if info['ptype'] != 'sdist' or info['pyversion'] != 'source':
-                    continue
-                # We don't store summary in project metadata as it makes
-                # the data bigger for no benefit during dependency
-                # resolution
-                dist = make_dist(data['name'], info['version'],
-                                 summary=data.get('summary',
-                                                  'Placeholder for summary'),
-                                 scheme=self.scheme)
-                md = dist.metadata
-                md.source_url = info['url']
-                # TODO SHA256 digest
-                if 'digest' in info and info['digest']:
-                    dist.digest = ('md5', info['digest'])
-                md.dependencies = info.get('requirements', {})
-                dist.exports = info.get('exports', {})
-                result[dist.version] = dist
-                result['urls'].setdefault(dist.version, set()).add(info['url'])
-        return result
-
-class DistPathLocator(Locator):
-    """
-    This locator finds installed distributions in a path. It can be useful for
-    adding to an :class:`AggregatingLocator`.
-    """
-    def __init__(self, distpath, **kwargs):
-        """
-        Initialise an instance.
-
-        :param distpath: A :class:`DistributionPath` instance to search.
-        """
-        super(DistPathLocator, self).__init__(**kwargs)
-        assert isinstance(distpath, DistributionPath)
-        self.distpath = distpath
-
-    def _get_project(self, name):
-        dist = self.distpath.get_distribution(name)
-        if dist is None:
-            result = {'urls': {}, 'digests': {}}
-        else:
-            result = {
-                dist.version: dist,
-                'urls': {dist.version: set([dist.source_url])},
-                'digests': {dist.version: set([None])}
-            }
-        return result
-
-
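A sketch showing how DistPathLocator can wrap the installed distributions so that they take part in an AggregatingLocator chain (the class defined next); DistributionPath comes from distlib.database, and the standalone distlib package is assumed.

from distlib.database import DistributionPath
from distlib.locators import AggregatingLocator, DistPathLocator, JSONLocator

installed = DistPathLocator(DistributionPath())        # what is already installed
chain = AggregatingLocator(installed, JSONLocator(), scheme='legacy')
print(chain.get_project('pip'))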
-class AggregatingLocator(Locator):
-    """
-    This class allows you to chain and/or merge a list of locators.
-    """
-    def __init__(self, *locators, **kwargs):
-        """
-        Initialise an instance.
-
-        :param locators: The list of locators to search.
-        :param kwargs: Passed to the superclass constructor,
-                       except for:
-                       * merge - if False (the default), the first successful
-                         search from any of the locators is returned. If True,
-                         the results from all locators are merged (this can be
-                         slow).
-        """
-        self.merge = kwargs.pop('merge', False)
-        self.locators = locators
-        super(AggregatingLocator, self).__init__(**kwargs)
-
-    def clear_cache(self):
-        super(AggregatingLocator, self).clear_cache()
-        for locator in self.locators:
-            locator.clear_cache()
-
-    def _set_scheme(self, value):
-        self._scheme = value
-        for locator in self.locators:
-            locator.scheme = value
-
-    scheme = property(Locator.scheme.fget, _set_scheme)
-
-    def _get_project(self, name):
-        result = {}
-        for locator in self.locators:
-            d = locator.get_project(name)
-            if d:
-                if self.merge:
-                    files = result.get('urls', {})
-                    digests = result.get('digests', {})
-                    # next line could overwrite result['urls'], result['digests']
-                    result.update(d)
-                    df = result.get('urls')
-                    if files and df:
-                        for k, v in files.items():
-                            if k in df:
-                                df[k] |= v
-                            else:
-                                df[k] = v
-                    dd = result.get('digests')
-                    if digests and dd:
-                        dd.update(digests)
-                else:
-                    # See issue #18. If any dists are found and we're looking
-                    # for specific constraints, we only return something if
-                    # a match is found. For example, if a DirectoryLocator
-                    # returns just foo (1.0) while we're looking for
-                    # foo (>= 2.0), we'll pretend there was nothing there so
-                    # that subsequent locators can be queried. Otherwise we
-                    # would just return foo (1.0) which would then lead to a
-                    # failure to find foo (>= 2.0), because other locators
-                    # weren't searched. Note that this only matters when
-                    # merge=False.
-                    if self.matcher is None:
-                        found = True
-                    else:
-                        found = False
-                        for k in d:
-                            if self.matcher.match(k):
-                                found = True
-                                break
-                    if found:
-                        result = d
-                        break
-        return result
-
-    def get_distribution_names(self):
-        """
-        Return all the distribution names known to this locator.
-        """
-        result = set()
-        for locator in self.locators:
-            try:
-                result |= locator.get_distribution_names()
-            except NotImplementedError:
-                pass
-        return result
-
-
-# We use a legacy scheme simply because most of the dists on PyPI use legacy
-# versions which don't conform to PEP 426 / PEP 440.
-default_locator = AggregatingLocator(
-                    JSONLocator(),
-                    SimpleScrapingLocator('https://pypi.org/simple/',
-                                          timeout=3.0),
-                    scheme='legacy')
-
-locate = default_locator.locate
-
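A hedged example of the module-level convenience function bound above, assuming the standalone distlib package and network access.

from distlib.locators import locate

dist = locate('requests (>= 2.0, < 3.0)')
if dist is not None:
    print(dist.name_and_version, dist.source_url)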
-NAME_VERSION_RE = re.compile(r'(?P<name>[\w-]+)\s*'
-                             r'\(\s*(==\s*)?(?P<ver>[^)]+)\)$')
-
-class DependencyFinder(object):
-    """
-    Locate dependencies for distributions.
-    """
-
-    def __init__(self, locator=None):
-        """
-        Initialise an instance, using the specified locator
-        to locate distributions.
-        """
-        self.locator = locator or default_locator
-        self.scheme = get_scheme(self.locator.scheme)
-
-    def add_distribution(self, dist):
-        """
-        Add a distribution to the finder. This will update internal information
-        about who provides what.
-        :param dist: The distribution to add.
-        """
-        logger.debug('adding distribution %s', dist)
-        name = dist.key
-        self.dists_by_name[name] = dist
-        self.dists[(name, dist.version)] = dist
-        for p in dist.provides:
-            name, version = parse_name_and_version(p)
-            logger.debug('Add to provided: %s, %s, %s', name, version, dist)
-            self.provided.setdefault(name, set()).add((version, dist))
-
-    def remove_distribution(self, dist):
-        """
-        Remove a distribution from the finder. This will update internal
-        information about who provides what.
-        :param dist: The distribution to remove.
-        """
-        logger.debug('removing distribution %s', dist)
-        name = dist.key
-        del self.dists_by_name[name]
-        del self.dists[(name, dist.version)]
-        for p in dist.provides:
-            name, version = parse_name_and_version(p)
-            logger.debug('Remove from provided: %s, %s, %s', name, version, dist)
-            s = self.provided[name]
-            s.remove((version, dist))
-            if not s:
-                del self.provided[name]
-
-    def get_matcher(self, reqt):
-        """
-        Get a version matcher for a requirement.
-        :param reqt: The requirement
-        :type reqt: str
-        :return: A version matcher (an instance of
-                 :class:`distlib.version.Matcher`).
-        """
-        try:
-            matcher = self.scheme.matcher(reqt)
-        except UnsupportedVersionError:  # pragma: no cover
-            # XXX compat-mode if cannot read the version
-            name = reqt.split()[0]
-            matcher = self.scheme.matcher(name)
-        return matcher
-
-    def find_providers(self, reqt):
-        """
-        Find the distributions which can fulfill a requirement.
-
-        :param reqt: The requirement.
-        :type reqt: str
-        :return: A set of distributions which can fulfill the requirement.
-        """
-        matcher = self.get_matcher(reqt)
-        name = matcher.key   # case-insensitive
-        result = set()
-        provided = self.provided
-        if name in provided:
-            for version, provider in provided[name]:
-                try:
-                    match = matcher.match(version)
-                except UnsupportedVersionError:
-                    match = False
-
-                if match:
-                    result.add(provider)
-                    break
-        return result
-
-    def try_to_replace(self, provider, other, problems):
-        """
-        Attempt to replace one provider with another. This is typically used
-        when resolving dependencies from multiple sources, e.g. A requires
-        (B >= 1.0) while C requires (B >= 1.1).
-
-        For successful replacement, ``provider`` must meet all the requirements
-        which ``other`` fulfills.
-
-        :param provider: The provider we are trying to replace with.
-        :param other: The provider we're trying to replace.
-        :param problems: If False is returned, this will contain what
-                         problems prevented replacement. This is currently
-                         a tuple of the literal string 'cantreplace',
-                         ``provider``, ``other``  and the set of requirements
-                         that ``provider`` couldn't fulfill.
-        :return: True if we can replace ``other`` with ``provider``, else
-                 False.
-        """
-        rlist = self.reqts[other]
-        unmatched = set()
-        for s in rlist:
-            matcher = self.get_matcher(s)
-            if not matcher.match(provider.version):
-                unmatched.add(s)
-        if unmatched:
-            # can't replace other with provider
-            problems.add(('cantreplace', provider, other,
-                          frozenset(unmatched)))
-            result = False
-        else:
-            # can replace other with provider
-            self.remove_distribution(other)
-            del self.reqts[other]
-            for s in rlist:
-                self.reqts.setdefault(provider, set()).add(s)
-            self.add_distribution(provider)
-            result = True
-        return result
-
-    def find(self, requirement, meta_extras=None, prereleases=False):
-        """
-        Find a distribution and all distributions it depends on.
-
-        :param requirement: The requirement specifying the distribution to
-                            find, or a Distribution instance.
-        :param meta_extras: A list of meta extras such as :test:, :build: and
-                            so on.
-        :param prereleases: If ``True``, allow pre-release versions to be
-                            returned - otherwise, don't return prereleases
-                            unless they're all that's available.
-
-        Return a set of :class:`Distribution` instances and a set of
-        problems.
-
-        The distributions returned should be such that they have the
-        :attr:`required` attribute set to ``True`` if they were
-        from the ``requirement`` passed to ``find()``, and they have the
-        :attr:`build_time_dependency` attribute set to ``True`` unless they
-        are post-installation dependencies of the ``requirement``.
-
-        The problems are a set of tuples, each consisting of a string such as
-        ``'unsatisfied'`` and the requirement which couldn't be satisfied
-        by any distribution known to the locator.
-        """
-
-        self.provided = {}
-        self.dists = {}
-        self.dists_by_name = {}
-        self.reqts = {}
-
-        meta_extras = set(meta_extras or [])
-        if ':*:' in meta_extras:
-            meta_extras.remove(':*:')
-            # :meta: and :run: are implicitly included
-            meta_extras |= set([':test:', ':build:', ':dev:'])
-
-        if isinstance(requirement, Distribution):
-            dist = odist = requirement
-            logger.debug('passed %s as requirement', odist)
-        else:
-            dist = odist = self.locator.locate(requirement,
-                                               prereleases=prereleases)
-            if dist is None:
-                raise DistlibException('Unable to locate %r' % requirement)
-            logger.debug('located %s', odist)
-        dist.requested = True
-        problems = set()
-        todo = set([dist])
-        install_dists = set([odist])
-        while todo:
-            dist = todo.pop()
-            name = dist.key     # case-insensitive
-            if name not in self.dists_by_name:
-                self.add_distribution(dist)
-            else:
-                #import pdb; pdb.set_trace()
-                other = self.dists_by_name[name]
-                if other != dist:
-                    self.try_to_replace(dist, other, problems)
-
-            ireqts = dist.run_requires | dist.meta_requires
-            sreqts = dist.build_requires
-            ereqts = set()
-            if meta_extras and dist in install_dists:
-                for key in ('test', 'build', 'dev'):
-                    e = ':%s:' % key
-                    if e in meta_extras:
-                        ereqts |= getattr(dist, '%s_requires' % key)
-            all_reqts = ireqts | sreqts | ereqts
-            for r in all_reqts:
-                providers = self.find_providers(r)
-                if not providers:
-                    logger.debug('No providers found for %r', r)
-                    provider = self.locator.locate(r, prereleases=prereleases)
-                    # If no provider is found and we didn't consider
-                    # prereleases, consider them now.
-                    if provider is None and not prereleases:
-                        provider = self.locator.locate(r, prereleases=True)
-                    if provider is None:
-                        logger.debug('Cannot satisfy %r', r)
-                        problems.add(('unsatisfied', r))
-                    else:
-                        n, v = provider.key, provider.version
-                        if (n, v) not in self.dists:
-                            todo.add(provider)
-                        providers.add(provider)
-                        if r in ireqts and dist in install_dists:
-                            install_dists.add(provider)
-                            logger.debug('Adding %s to install_dists',
-                                         provider.name_and_version)
-                for p in providers:
-                    name = p.key
-                    if name not in self.dists_by_name:
-                        self.reqts.setdefault(p, set()).add(r)
-                    else:
-                        other = self.dists_by_name[name]
-                        if other != p:
-                            # see if other can be replaced by p
-                            self.try_to_replace(p, other, problems)
-
-        dists = set(self.dists.values())
-        for dist in dists:
-            dist.build_time_dependency = dist not in install_dists
-            if dist.build_time_dependency:
-                logger.debug('%s is a build-time dependency only.',
-                             dist.name_and_version)
-        logger.debug('find done for %s', odist)
-        return dists, problems
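A sketch of the finder in use, assuming the standalone distlib package and network access (resolution can be slow, since archives may need to be inspected):

from distlib.locators import DependencyFinder

finder = DependencyFinder()                      # uses default_locator
dists, problems = finder.find('requests (2.22.0)')
for dist in sorted(dists, key=lambda d: d.key):
    print(dist.name_and_version)
if problems:
    print('problems:', problems)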
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/manifest.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/manifest.py
deleted file mode 100644
index ca0fe44..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/manifest.py
+++ /dev/null
@@ -1,393 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2012-2013 Python Software Foundation.
-# See LICENSE.txt and CONTRIBUTORS.txt.
-#
-"""
-Class representing the list of files in a distribution.
-
-Equivalent to distutils.filelist, but fixes some problems.
-"""
-import fnmatch
-import logging
-import os
-import re
-import sys
-
-from . import DistlibException
-from .compat import fsdecode
-from .util import convert_path
-
-
-__all__ = ['Manifest']
-
-logger = logging.getLogger(__name__)
-
-# a \ followed by some spaces + EOL
-_COLLAPSE_PATTERN = re.compile('\\\\w*\n', re.M)
-_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)
-
-#
-# Due to the different results returned by fnmatch.translate, we need
-# to do slightly different processing for Python 2.7 and 3.2 ... this needed
-# to be brought in for Python 3.6 onwards.
-#
-_PYTHON_VERSION = sys.version_info[:2]
-
-class Manifest(object):
-    """A list of files built by on exploring the filesystem and filtered by
-    applying various patterns to what we find there.
-    """
-
-    def __init__(self, base=None):
-        """
-        Initialise an instance.
-
-        :param base: The base directory to explore under.
-        """
-        self.base = os.path.abspath(os.path.normpath(base or os.getcwd()))
-        self.prefix = self.base + os.sep
-        self.allfiles = None
-        self.files = set()
-
-    #
-    # Public API
-    #
-
-    def findall(self):
-        """Find all files under the base and set ``allfiles`` to the absolute
-        pathnames of files found.
-        """
-        from stat import S_ISREG, S_ISDIR, S_ISLNK
-
-        self.allfiles = allfiles = []
-        root = self.base
-        stack = [root]
-        pop = stack.pop
-        push = stack.append
-
-        while stack:
-            root = pop()
-            names = os.listdir(root)
-
-            for name in names:
-                fullname = os.path.join(root, name)
-
-                # Avoid excess stat calls -- just one will do, thank you!
-                stat = os.stat(fullname)
-                mode = stat.st_mode
-                if S_ISREG(mode):
-                    allfiles.append(fsdecode(fullname))
-                elif S_ISDIR(mode) and not S_ISLNK(mode):
-                    push(fullname)
-
-    def add(self, item):
-        """
-        Add a file to the manifest.
-
-        :param item: The pathname to add. This can be relative to the base.
-        """
-        if not item.startswith(self.prefix):
-            item = os.path.join(self.base, item)
-        self.files.add(os.path.normpath(item))
-
-    def add_many(self, items):
-        """
-        Add a list of files to the manifest.
-
-        :param items: The pathnames to add. These can be relative to the base.
-        """
-        for item in items:
-            self.add(item)
-
-    def sorted(self, wantdirs=False):
-        """
-        Return sorted files in directory order
-        """
-
-        def add_dir(dirs, d):
-            dirs.add(d)
-            logger.debug('add_dir added %s', d)
-            if d != self.base:
-                parent, _ = os.path.split(d)
-                assert parent not in ('', '/')
-                add_dir(dirs, parent)
-
-        result = set(self.files)    # make a copy!
-        if wantdirs:
-            dirs = set()
-            for f in result:
-                add_dir(dirs, os.path.dirname(f))
-            result |= dirs
-        return [os.path.join(*path_tuple) for path_tuple in
-                sorted(os.path.split(path) for path in result)]
-
-    def clear(self):
-        """Clear all collected files."""
-        self.files = set()
-        self.allfiles = []
-
-    def process_directive(self, directive):
-        """
-        Process a directive which either adds some files from ``allfiles`` to
-        ``files``, or removes some files from ``files``.
-
-        :param directive: The directive to process. This should be in a format
-                     compatible with distutils ``MANIFEST.in`` files:
-
-                     http://docs.python.org/distutils/sourcedist.html#commands
-        """
-        # Parse the line: split it up, make sure the right number of words
-        # is there, and return the relevant words.  'action' is always
-        # defined: it's the first word of the line.  Which of the other
-        # three are defined depends on the action; it'll be either
-        # patterns, (dir and patterns), or (dirpattern).
-        action, patterns, thedir, dirpattern = self._parse_directive(directive)
-
-        # OK, now we know that the action is valid and we have the
-        # right number of words on the line for that action -- so we
-        # can proceed with minimal error-checking.
-        if action == 'include':
-            for pattern in patterns:
-                if not self._include_pattern(pattern, anchor=True):
-                    logger.warning('no files found matching %r', pattern)
-
-        elif action == 'exclude':
-            for pattern in patterns:
-                found = self._exclude_pattern(pattern, anchor=True)
-                #if not found:
-                #    logger.warning('no previously-included files '
-                #                   'found matching %r', pattern)
-
-        elif action == 'global-include':
-            for pattern in patterns:
-                if not self._include_pattern(pattern, anchor=False):
-                    logger.warning('no files found matching %r '
-                                   'anywhere in distribution', pattern)
-
-        elif action == 'global-exclude':
-            for pattern in patterns:
-                found = self._exclude_pattern(pattern, anchor=False)
-                #if not found:
-                #    logger.warning('no previously-included files '
-                #                   'matching %r found anywhere in '
-                #                   'distribution', pattern)
-
-        elif action == 'recursive-include':
-            for pattern in patterns:
-                if not self._include_pattern(pattern, prefix=thedir):
-                    logger.warning('no files found matching %r '
-                                   'under directory %r', pattern, thedir)
-
-        elif action == 'recursive-exclude':
-            for pattern in patterns:
-                found = self._exclude_pattern(pattern, prefix=thedir)
-                #if not found:
-                #    logger.warning('no previously-included files '
-                #                   'matching %r found under directory %r',
-                #                   pattern, thedir)
-
-        elif action == 'graft':
-            if not self._include_pattern(None, prefix=dirpattern):
-                logger.warning('no directories found matching %r',
-                               dirpattern)
-
-        elif action == 'prune':
-            if not self._exclude_pattern(None, prefix=dirpattern):
-                logger.warning('no previously-included directories found '
-                               'matching %r', dirpattern)
-        else:   # pragma: no cover
-            # This should never happen, as it should be caught in
-            # _parse_template_line
-            raise DistlibException(
-                'invalid action %r' % action)
-
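A sketch of the public Manifest API described above, using MANIFEST.in-style directives; the base path is a placeholder and the standalone distlib package is assumed.

from distlib.manifest import Manifest

m = Manifest('/path/to/project')                 # placeholder base directory
m.findall()                                      # populate the candidate file list
m.process_directive('include README.rst')
m.process_directive('recursive-include src *.py')
m.process_directive('global-exclude *.pyc')
print(m.sorted(wantdirs=True))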
-    #
-    # Private API
-    #
-
-    def _parse_directive(self, directive):
-        """
-        Validate a directive.
-        :param directive: The directive to validate.
-        :return: A tuple of action, patterns, thedir, dir_patterns
-        """
-        words = directive.split()
-        if len(words) == 1 and words[0] not in ('include', 'exclude',
-                                                'global-include',
-                                                'global-exclude',
-                                                'recursive-include',
-                                                'recursive-exclude',
-                                                'graft', 'prune'):
-            # no action given, let's use the default 'include'
-            words.insert(0, 'include')
-
-        action = words[0]
-        patterns = thedir = dir_pattern = None
-
-        if action in ('include', 'exclude',
-                      'global-include', 'global-exclude'):
-            if len(words) < 2:
-                raise DistlibException(
-                    '%r expects <pattern1> <pattern2> ...' % action)
-
-            patterns = [convert_path(word) for word in words[1:]]
-
-        elif action in ('recursive-include', 'recursive-exclude'):
-            if len(words) < 3:
-                raise DistlibException(
-                    '%r expects <dir> <pattern1> <pattern2> ...' % action)
-
-            thedir = convert_path(words[1])
-            patterns = [convert_path(word) for word in words[2:]]
-
-        elif action in ('graft', 'prune'):
-            if len(words) != 2:
-                raise DistlibException(
-                    '%r expects a single <dir_pattern>' % action)
-
-            dir_pattern = convert_path(words[1])
-
-        else:
-            raise DistlibException('unknown action %r' % action)
-
-        return action, patterns, thedir, dir_pattern
-
-    def _include_pattern(self, pattern, anchor=True, prefix=None,
-                         is_regex=False):
-        """Select strings (presumably filenames) from 'self.files' that
-        match 'pattern', a Unix-style wildcard (glob) pattern.
-
-        Patterns are not quite the same as implemented by the 'fnmatch'
-        module: '*' and '?'  match non-special characters, where "special"
-        is platform-dependent: slash on Unix; colon, slash, and backslash on
-        DOS/Windows; and colon on Mac OS.
-
-        If 'anchor' is true (the default), then the pattern match is more
-        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
-        'anchor' is false, both of these will match.
-
-        If 'prefix' is supplied, then only filenames starting with 'prefix'
-        (itself a pattern) and ending with 'pattern', with anything in between
-        them, will match.  'anchor' is ignored in this case.
-
-        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
-        'pattern' is assumed to be either a string containing a regex or a
-        regex object -- no translation is done, the regex is just compiled
-        and used as-is.
-
-        Selected strings will be added to self.files.
-
-        Return True if files are found.
-        """
-        # XXX docstring lying about what the special chars are?
-        found = False
-        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
-
-        # delayed loading of allfiles list
-        if self.allfiles is None:
-            self.findall()
-
-        for name in self.allfiles:
-            if pattern_re.search(name):
-                self.files.add(name)
-                found = True
-        return found
-
-    def _exclude_pattern(self, pattern, anchor=True, prefix=None,
-                         is_regex=False):
-        """Remove strings (presumably filenames) from 'files' that match
-        'pattern'.
-
-        Other parameters are the same as for 'include_pattern()', above.
-        The list 'self.files' is modified in place. Return True if files are
-        found.
-
-        This API is public to allow e.g. exclusion of SCM subdirs, e.g. when
-        packaging source distributions
-        """
-        found = False
-        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
-        for f in list(self.files):
-            if pattern_re.search(f):
-                self.files.remove(f)
-                found = True
-        return found
-
-    def _translate_pattern(self, pattern, anchor=True, prefix=None,
-                           is_regex=False):
-        """Translate a shell-like wildcard pattern to a compiled regular
-        expression.
-
-        Return the compiled regex.  If 'is_regex' true,
-        then 'pattern' is directly compiled to a regex (if it's a string)
-        or just returned as-is (assumes it's a regex object).
-        """
-        if is_regex:
-            if isinstance(pattern, str):
-                return re.compile(pattern)
-            else:
-                return pattern
-
-        if _PYTHON_VERSION > (3, 2):
-            # ditch start and end characters
-            start, _, end = self._glob_to_re('_').partition('_')
-
-        if pattern:
-            pattern_re = self._glob_to_re(pattern)
-            if _PYTHON_VERSION > (3, 2):
-                assert pattern_re.startswith(start) and pattern_re.endswith(end)
-        else:
-            pattern_re = ''
-
-        base = re.escape(os.path.join(self.base, ''))
-        if prefix is not None:
-            # ditch end of pattern character
-            if _PYTHON_VERSION <= (3, 2):
-                empty_pattern = self._glob_to_re('')
-                prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)]
-            else:
-                prefix_re = self._glob_to_re(prefix)
-                assert prefix_re.startswith(start) and prefix_re.endswith(end)
-                prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
-            sep = os.sep
-            if os.sep == '\\':
-                sep = r'\\'
-            if _PYTHON_VERSION <= (3, 2):
-                pattern_re = '^' + base + sep.join((prefix_re,
-                                                    '.*' + pattern_re))
-            else:
-                pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
-                pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep,
-                                                  pattern_re, end)
-        else:  # no prefix -- respect anchor flag
-            if anchor:
-                if _PYTHON_VERSION <= (3, 2):
-                    pattern_re = '^' + base + pattern_re
-                else:
-                    pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):])
-
-        return re.compile(pattern_re)
-
-    def _glob_to_re(self, pattern):
-        """Translate a shell-like glob pattern to a regular expression.
-
-        Return a string containing the regex.  Differs from
-        'fnmatch.translate()' in that '*' does not match "special characters"
-        (which are platform-specific).
-        """
-        pattern_re = fnmatch.translate(pattern)
-
-        # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
-        # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
-        # and by extension they shouldn't match such "special characters" under
-        # any OS.  So change all non-escaped dots in the RE to match any
-        # character except the special characters (currently: just os.sep).
-        sep = os.sep
-        if os.sep == '\\':
-            # we're using a regex to manipulate a regex, so we need
-            # to escape the backslash twice
-            sep = r'\\\\'
-        escaped = r'\1[^%s]' % sep
-        pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
-        return pattern_re
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/markers.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/markers.py
deleted file mode 100644
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/markers.py
+++ /dev/null
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2012-2017 Vinay Sajip.
-# Licensed to the Python Software Foundation under a contributor agreement.
-# See LICENSE.txt and CONTRIBUTORS.txt.
-#
-"""
-Parser for the environment markers micro-language defined in PEP 508.
-"""
-
-import os
-import sys
-import platform
-
-from .compat import string_types
-from .util import in_venv, parse_marker
-
-__all__ = ['interpret']
-
-def _is_literal(o):
-    if not isinstance(o, string_types) or not o:
-        return False
-    return o[0] in '\'"'
-
-class Evaluator(object):
-    """
-    This class is used to evaluate marker expressions.
-    """
-
-    operations = {
-        '==': lambda x, y: x == y,
-        '===': lambda x, y: x == y,
-        '~=': lambda x, y: x == y or x > y,
-        '!=': lambda x, y: x != y,
-        '<':  lambda x, y: x < y,
-        '<=':  lambda x, y: x == y or x < y,
-        '>':  lambda x, y: x > y,
-        '>=':  lambda x, y: x == y or x > y,
-        'and': lambda x, y: x and y,
-        'or': lambda x, y: x or y,
-        'in': lambda x, y: x in y,
-        'not in': lambda x, y: x not in y,
-    }
-
-    def evaluate(self, expr, context):
-        """
-        Evaluate a marker expression returned by the :func:`parse_requirement`
-        function in the specified context.
-        """
-        if isinstance(expr, string_types):
-            if expr[0] in '\'"':
-                result = expr[1:-1]
-            else:
-                if expr not in context:
-                    raise SyntaxError('unknown variable: %s' % expr)
-                result = context[expr]
-        else:
-            assert isinstance(expr, dict)
-            op = expr['op']
-            if op not in self.operations:
-                raise NotImplementedError('op not implemented: %s' % op)
-            elhs = expr['lhs']
-            erhs = expr['rhs']
-            if _is_literal(expr['lhs']) and _is_literal(expr['rhs']):
-                raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs))
-
-            lhs = self.evaluate(elhs, context)
-            rhs = self.evaluate(erhs, context)
-            result = self.operations[op](lhs, rhs)
-        return result
-
-def default_context():
-    def format_full_version(info):
-        version = '%s.%s.%s' % (info.major, info.minor, info.micro)
-        kind = info.releaselevel
-        if kind != 'final':
-            version += kind[0] + str(info.serial)
-        return version
-
-    if hasattr(sys, 'implementation'):
-        implementation_version = format_full_version(sys.implementation.version)
-        implementation_name = sys.implementation.name
-    else:
-        implementation_version = '0'
-        implementation_name = ''
-
-    result = {
-        'implementation_name': implementation_name,
-        'implementation_version': implementation_version,
-        'os_name': os.name,
-        'platform_machine': platform.machine(),
-        'platform_python_implementation': platform.python_implementation(),
-        'platform_release': platform.release(),
-        'platform_system': platform.system(),
-        'platform_version': platform.version(),
-        'platform_in_venv': str(in_venv()),
-        'python_full_version': platform.python_version(),
-        'python_version': platform.python_version()[:3],
-        'sys_platform': sys.platform,
-    }
-    return result
-
-DEFAULT_CONTEXT = default_context()
-del default_context
-
-evaluator = Evaluator()
-
-def interpret(marker, execution_context=None):
-    """
-    Interpret a marker and return a result depending on environment.
-
-    :param marker: The marker to interpret.
-    :type marker: str
-    :param execution_context: The context used for name lookup.
-    :type execution_context: mapping
-    """
-    try:
-        expr, rest = parse_marker(marker)
-    except Exception as e:
-        raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e))
-    if rest and rest[0] != '#':
-        raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest))
-    context = dict(DEFAULT_CONTEXT)
-    if execution_context:
-        context.update(execution_context)
-    return evaluator.evaluate(expr, context)
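A sketch of interpret() in use, assuming the standalone distlib package: markers are evaluated against DEFAULT_CONTEXT unless values are overridden via execution_context.

from distlib.markers import interpret

print(interpret('python_version >= "3.6" and os_name == "posix"'))
print(interpret('sys_platform == "win32"',
                execution_context={'sys_platform': 'win32'}))   # forced override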
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/metadata.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/metadata.py
deleted file mode 100644
index 2d61378..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/metadata.py
+++ /dev/null
@@ -1,1096 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2012 The Python Software Foundation.
-# See LICENSE.txt and CONTRIBUTORS.txt.
-#
-"""Implementation of the Metadata for Python packages PEPs.
-
-Supports all metadata formats (1.0, 1.1, 1.2, and 2.0 experimental).
-"""
-from __future__ import unicode_literals
-
-import codecs
-from email import message_from_file
-import json
-import logging
-import re
-
-
-from . import DistlibException, __version__
-from .compat import StringIO, string_types, text_type
-from .markers import interpret
-from .util import extract_by_key, get_extras
-from .version import get_scheme, PEP440_VERSION_RE
-
-logger = logging.getLogger(__name__)
-
-
-class MetadataMissingError(DistlibException):
-    """A required metadata is missing"""
-
-
-class MetadataConflictError(DistlibException):
-    """Attempt to read or write metadata fields that are conflictual."""
-
-
-class MetadataUnrecognizedVersionError(DistlibException):
-    """Unknown metadata version number."""
-
-
-class MetadataInvalidError(DistlibException):
-    """A metadata value is invalid"""
-
-# public API of this module
-__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION']
-
-# Encoding used for the PKG-INFO files
-PKG_INFO_ENCODING = 'utf-8'
-
-# preferred version. Hopefully will be changed
-# to 1.2 once PEP 345 is supported everywhere
-PKG_INFO_PREFERRED_VERSION = '1.1'
-
-_LINE_PREFIX_1_2 = re.compile('\n       \\|')
-_LINE_PREFIX_PRE_1_2 = re.compile('\n        ')
-_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
-               'Summary', 'Description',
-               'Keywords', 'Home-page', 'Author', 'Author-email',
-               'License')
-
-_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
-               'Supported-Platform', 'Summary', 'Description',
-               'Keywords', 'Home-page', 'Author', 'Author-email',
-               'License', 'Classifier', 'Download-URL', 'Obsoletes',
-               'Provides', 'Requires')
-
-_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier',
-                'Download-URL')
-
-_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
-               'Supported-Platform', 'Summary', 'Description',
-               'Keywords', 'Home-page', 'Author', 'Author-email',
-               'Maintainer', 'Maintainer-email', 'License',
-               'Classifier', 'Download-URL', 'Obsoletes-Dist',
-               'Project-URL', 'Provides-Dist', 'Requires-Dist',
-               'Requires-Python', 'Requires-External')
-
-_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python',
-                'Obsoletes-Dist', 'Requires-External', 'Maintainer',
-                'Maintainer-email', 'Project-URL')
-
-_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
-               'Supported-Platform', 'Summary', 'Description',
-               'Keywords', 'Home-page', 'Author', 'Author-email',
-               'Maintainer', 'Maintainer-email', 'License',
-               'Classifier', 'Download-URL', 'Obsoletes-Dist',
-               'Project-URL', 'Provides-Dist', 'Requires-Dist',
-               'Requires-Python', 'Requires-External', 'Private-Version',
-               'Obsoleted-By', 'Setup-Requires-Dist', 'Extension',
-               'Provides-Extra')
-
-_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By',
-                'Setup-Requires-Dist', 'Extension')
-
-# See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in
-# the metadata. Include them in the tuple literal below to allow them
-# (for now).
-_566_FIELDS = _426_FIELDS + ('Description-Content-Type',
-                             'Requires', 'Provides')
-
-_566_MARKERS = ('Description-Content-Type',)
-
-_ALL_FIELDS = set()
-_ALL_FIELDS.update(_241_FIELDS)
-_ALL_FIELDS.update(_314_FIELDS)
-_ALL_FIELDS.update(_345_FIELDS)
-_ALL_FIELDS.update(_426_FIELDS)
-_ALL_FIELDS.update(_566_FIELDS)
-
-EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''')
-
-
-def _version2fieldlist(version):
-    if version == '1.0':
-        return _241_FIELDS
-    elif version == '1.1':
-        return _314_FIELDS
-    elif version == '1.2':
-        return _345_FIELDS
-    elif version in ('1.3', '2.1'):
-        return _345_FIELDS + _566_FIELDS
-    elif version == '2.0':
-        return _426_FIELDS
-    raise MetadataUnrecognizedVersionError(version)
-
-
-def _best_version(fields):
-    """Detect the best version depending on the fields used."""
-    def _has_marker(keys, markers):
-        for marker in markers:
-            if marker in keys:
-                return True
-        return False
-
-    keys = []
-    for key, value in fields.items():
-        if value in ([], 'UNKNOWN', None):
-            continue
-        keys.append(key)
-
-    possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.0', '2.1']
-
-    # first, see whether any field is not part of one of the versions
-    for key in keys:
-        if key not in _241_FIELDS and '1.0' in possible_versions:
-            possible_versions.remove('1.0')
-            logger.debug('Removed 1.0 due to %s', key)
-        if key not in _314_FIELDS and '1.1' in possible_versions:
-            possible_versions.remove('1.1')
-            logger.debug('Removed 1.1 due to %s', key)
-        if key not in _345_FIELDS and '1.2' in possible_versions:
-            possible_versions.remove('1.2')
-            logger.debug('Removed 1.2 due to %s', key)
-        if key not in _566_FIELDS and '1.3' in possible_versions:
-            possible_versions.remove('1.3')
-            logger.debug('Removed 1.3 due to %s', key)
-        if key not in _566_FIELDS and '2.1' in possible_versions:
-            if key != 'Description':  # In 2.1, description allowed after headers
-                possible_versions.remove('2.1')
-                logger.debug('Removed 2.1 due to %s', key)
-        if key not in _426_FIELDS and '2.0' in possible_versions:
-            possible_versions.remove('2.0')
-            logger.debug('Removed 2.0 due to %s', key)
-
-    # possible_versions contains qualified versions
-    if len(possible_versions) == 1:
-        return possible_versions[0]   # found !
-    elif len(possible_versions) == 0:
-        logger.debug('Out of options - unknown metadata set: %s', fields)
-        raise MetadataConflictError('Unknown metadata set')
-
-    # let's see if one unique marker is found
-    is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS)
-    is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS)
-    is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS)
-    is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS)
-    if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_0) > 1:
-        raise MetadataConflictError('You used incompatible 1.1/1.2/2.0/2.1 fields')
-
-    # we have the choice, 1.0, or 1.2, or 2.0
-    #   - 1.0 has a broken Summary field but works with all tools
-    #   - 1.1 is to avoid
-    #   - 1.2 fixes Summary but has little adoption
-    #   - 2.0 adds more features and is very new
-    if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_0:
-        # we couldn't find any specific marker
-        if PKG_INFO_PREFERRED_VERSION in possible_versions:
-            return PKG_INFO_PREFERRED_VERSION
-    if is_1_1:
-        return '1.1'
-    if is_1_2:
-        return '1.2'
-    if is_2_1:
-        return '2.1'
-
-    return '2.0'
-
-_ATTR2FIELD = {
-    'metadata_version': 'Metadata-Version',
-    'name': 'Name',
-    'version': 'Version',
-    'platform': 'Platform',
-    'supported_platform': 'Supported-Platform',
-    'summary': 'Summary',
-    'description': 'Description',
-    'keywords': 'Keywords',
-    'home_page': 'Home-page',
-    'author': 'Author',
-    'author_email': 'Author-email',
-    'maintainer': 'Maintainer',
-    'maintainer_email': 'Maintainer-email',
-    'license': 'License',
-    'classifier': 'Classifier',
-    'download_url': 'Download-URL',
-    'obsoletes_dist': 'Obsoletes-Dist',
-    'provides_dist': 'Provides-Dist',
-    'requires_dist': 'Requires-Dist',
-    'setup_requires_dist': 'Setup-Requires-Dist',
-    'requires_python': 'Requires-Python',
-    'requires_external': 'Requires-External',
-    'requires': 'Requires',
-    'provides': 'Provides',
-    'obsoletes': 'Obsoletes',
-    'project_url': 'Project-URL',
-    'private_version': 'Private-Version',
-    'obsoleted_by': 'Obsoleted-By',
-    'extension': 'Extension',
-    'provides_extra': 'Provides-Extra',
-}
-
-_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist')
-_VERSIONS_FIELDS = ('Requires-Python',)
-_VERSION_FIELDS = ('Version',)
-_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes',
-               'Requires', 'Provides', 'Obsoletes-Dist',
-               'Provides-Dist', 'Requires-Dist', 'Requires-External',
-               'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist',
-               'Provides-Extra', 'Extension')
-_LISTTUPLEFIELDS = ('Project-URL',)
-
-_ELEMENTSFIELD = ('Keywords',)
-
-_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description')
-
-_MISSING = object()
-
-_FILESAFE = re.compile('[^A-Za-z0-9.]+')
-
-
-def _get_name_and_version(name, version, for_filename=False):
-    """Return the distribution name with version.
-
-    If for_filename is true, return a filename-escaped form."""
-    if for_filename:
-        # For both name and version any runs of non-alphanumeric or '.'
-        # characters are replaced with a single '-'.  Additionally any
-        # spaces in the version string become '.'
-        name = _FILESAFE.sub('-', name)
-        version = _FILESAFE.sub('-', version.replace(' ', '.'))
-    return '%s-%s' % (name, version)
-
-
-class LegacyMetadata(object):
-    """The legacy metadata of a release.
-
-    Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can
-    instantiate the class with one of these arguments (or none):
-    - *path*, the path to a metadata file
-    - *fileobj* give a file-like object with metadata as content
-    - *mapping* is a dict-like object
-    - *scheme* is a version scheme name
-    """
-    # TODO document the mapping API and UNKNOWN default key
-
-    def __init__(self, path=None, fileobj=None, mapping=None,
-                 scheme='default'):
-        if [path, fileobj, mapping].count(None) < 2:
-            raise TypeError('path, fileobj and mapping are exclusive')
-        self._fields = {}
-        self.requires_files = []
-        self._dependencies = None
-        self.scheme = scheme
-        if path is not None:
-            self.read(path)
-        elif fileobj is not None:
-            self.read_file(fileobj)
-        elif mapping is not None:
-            self.update(mapping)
-            self.set_metadata_version()
-
-    def set_metadata_version(self):
-        self._fields['Metadata-Version'] = _best_version(self._fields)
-
-    def _write_field(self, fileobj, name, value):
-        fileobj.write('%s: %s\n' % (name, value))
-
-    def __getitem__(self, name):
-        return self.get(name)
-
-    def __setitem__(self, name, value):
-        return self.set(name, value)
-
-    def __delitem__(self, name):
-        field_name = self._convert_name(name)
-        try:
-            del self._fields[field_name]
-        except KeyError:
-            raise KeyError(name)
-
-    def __contains__(self, name):
-        return (name in self._fields or
-                self._convert_name(name) in self._fields)
-
-    def _convert_name(self, name):
-        if name in _ALL_FIELDS:
-            return name
-        name = name.replace('-', '_').lower()
-        return _ATTR2FIELD.get(name, name)
-
-    def _default_value(self, name):
-        if name in _LISTFIELDS or name in _ELEMENTSFIELD:
-            return []
-        return 'UNKNOWN'
-
-    def _remove_line_prefix(self, value):
-        if self.metadata_version in ('1.0', '1.1'):
-            return _LINE_PREFIX_PRE_1_2.sub('\n', value)
-        else:
-            return _LINE_PREFIX_1_2.sub('\n', value)
-
-    def __getattr__(self, name):
-        if name in _ATTR2FIELD:
-            return self[name]
-        raise AttributeError(name)
-
-    #
-    # Public API
-    #
-
-#    dependencies = property(_get_dependencies, _set_dependencies)
-
-    def get_fullname(self, filesafe=False):
-        """Return the distribution name with version.
-
-        If filesafe is true, return a filename-escaped form."""
-        return _get_name_and_version(self['Name'], self['Version'], filesafe)
-
-    def is_field(self, name):
-        """return True if name is a valid metadata key"""
-        name = self._convert_name(name)
-        return name in _ALL_FIELDS
-
-    def is_multi_field(self, name):
-        name = self._convert_name(name)
-        return name in _LISTFIELDS
-
-    def read(self, filepath):
-        """Read the metadata values from a file path."""
-        fp = codecs.open(filepath, 'r', encoding='utf-8')
-        try:
-            self.read_file(fp)
-        finally:
-            fp.close()
-
-    def read_file(self, fileob):
-        """Read the metadata values from a file object."""
-        msg = message_from_file(fileob)
-        self._fields['Metadata-Version'] = msg['metadata-version']
-
-        # When reading, get all the fields we can
-        for field in _ALL_FIELDS:
-            if field not in msg:
-                continue
-            if field in _LISTFIELDS:
-                # we can have multiple lines
-                values = msg.get_all(field)
-                if field in _LISTTUPLEFIELDS and values is not None:
-                    values = [tuple(value.split(',')) for value in values]
-                self.set(field, values)
-            else:
-                # single line
-                value = msg[field]
-                if value is not None and value != 'UNKNOWN':
-                    self.set(field, value)
-        # logger.debug('Attempting to set metadata for %s', self)
-        # self.set_metadata_version()
-
-    def write(self, filepath, skip_unknown=False):
-        """Write the metadata fields to filepath."""
-        fp = codecs.open(filepath, 'w', encoding='utf-8')
-        try:
-            self.write_file(fp, skip_unknown)
-        finally:
-            fp.close()
-
-    def write_file(self, fileobject, skip_unknown=False):
-        """Write the PKG-INFO format data to a file object."""
-        self.set_metadata_version()
-
-        for field in _version2fieldlist(self['Metadata-Version']):
-            values = self.get(field)
-            if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']):
-                continue
-            if field in _ELEMENTSFIELD:
-                self._write_field(fileobject, field, ','.join(values))
-                continue
-            if field not in _LISTFIELDS:
-                if field == 'Description':
-                    if self.metadata_version in ('1.0', '1.1'):
-                        values = values.replace('\n', '\n        ')
-                    else:
-                        values = values.replace('\n', '\n       |')
-                values = [values]
-
-            if field in _LISTTUPLEFIELDS:
-                values = [','.join(value) for value in values]
-
-            for value in values:
-                self._write_field(fileobject, field, value)
-
-    def update(self, other=None, **kwargs):
-        """Set metadata values from the given iterable `other` and kwargs.
-
-        Behavior is like `dict.update`: If `other` has a ``keys`` method,
-        they are looped over and ``self[key]`` is assigned ``other[key]``.
-        Else, ``other`` is an iterable of ``(key, value)`` iterables.
-
-        Keys that don't match a metadata field or that have an empty value are
-        dropped.
-        """
-        def _set(key, value):
-            if key in _ATTR2FIELD and value:
-                self.set(self._convert_name(key), value)
-
-        if not other:
-            # other is None or empty container
-            pass
-        elif hasattr(other, 'keys'):
-            for k in other.keys():
-                _set(k, other[k])
-        else:
-            for k, v in other:
-                _set(k, v)
-
-        if kwargs:
-            for k, v in kwargs.items():
-                _set(k, v)
-
-    def set(self, name, value):
-        """Control then set a metadata field."""
-        name = self._convert_name(name)
-
-        if ((name in _ELEMENTSFIELD or name == 'Platform') and
-            not isinstance(value, (list, tuple))):
-            if isinstance(value, string_types):
-                value = [v.strip() for v in value.split(',')]
-            else:
-                value = []
-        elif (name in _LISTFIELDS and
-              not isinstance(value, (list, tuple))):
-            if isinstance(value, string_types):
-                value = [value]
-            else:
-                value = []
-
-        if logger.isEnabledFor(logging.WARNING):
-            project_name = self['Name']
-
-            scheme = get_scheme(self.scheme)
-            if name in _PREDICATE_FIELDS and value is not None:
-                for v in value:
-                    # check that the values are valid
-                    if not scheme.is_valid_matcher(v.split(';')[0]):
-                        logger.warning(
-                            "'%s': '%s' is not valid (field '%s')",
-                            project_name, v, name)
-            # FIXME this rejects UNKNOWN, is that right?
-            elif name in _VERSIONS_FIELDS and value is not None:
-                if not scheme.is_valid_constraint_list(value):
-                    logger.warning("'%s': '%s' is not a valid version (field '%s')",
-                                   project_name, value, name)
-            elif name in _VERSION_FIELDS and value is not None:
-                if not scheme.is_valid_version(value):
-                    logger.warning("'%s': '%s' is not a valid version (field '%s')",
-                                   project_name, value, name)
-
-        if name in _UNICODEFIELDS:
-            if name == 'Description':
-                value = self._remove_line_prefix(value)
-
-        self._fields[name] = value
-
-    def get(self, name, default=_MISSING):
-        """Get a metadata field."""
-        name = self._convert_name(name)
-        if name not in self._fields:
-            if default is _MISSING:
-                default = self._default_value(name)
-            return default
-        if name in _UNICODEFIELDS:
-            value = self._fields[name]
-            return value
-        elif name in _LISTFIELDS:
-            value = self._fields[name]
-            if value is None:
-                return []
-            res = []
-            for val in value:
-                if name not in _LISTTUPLEFIELDS:
-                    res.append(val)
-                else:
-                    # That's for Project-URL
-                    res.append((val[0], val[1]))
-            return res
-
-        elif name in _ELEMENTSFIELD:
-            value = self._fields[name]
-            if isinstance(value, string_types):
-                return value.split(',')
-        return self._fields[name]
-
-    def check(self, strict=False):
-        """Check if the metadata is compliant. If strict is True then raise if
-        no Name or Version are provided"""
-        self.set_metadata_version()
-
-        # XXX should check the versions (if the file was loaded)
-        missing, warnings = [], []
-
-        for attr in ('Name', 'Version'):  # required by PEP 345
-            if attr not in self:
-                missing.append(attr)
-
-        if strict and missing != []:
-            msg = 'missing required metadata: %s' % ', '.join(missing)
-            raise MetadataMissingError(msg)
-
-        for attr in ('Home-page', 'Author'):
-            if attr not in self:
-                missing.append(attr)
-
-        # checking metadata 1.2 (XXX needs to check 1.1, 1.0)
-        if self['Metadata-Version'] != '1.2':
-            return missing, warnings
-
-        scheme = get_scheme(self.scheme)
-
-        def are_valid_constraints(value):
-            for v in value:
-                if not scheme.is_valid_matcher(v.split(';')[0]):
-                    return False
-            return True
-
-        for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints),
-                                   (_VERSIONS_FIELDS,
-                                    scheme.is_valid_constraint_list),
-                                   (_VERSION_FIELDS,
-                                    scheme.is_valid_version)):
-            for field in fields:
-                value = self.get(field, None)
-                if value is not None and not controller(value):
-                    warnings.append("Wrong value for '%s': %s" % (field, value))
-
-        return missing, warnings
-
-    def todict(self, skip_missing=False):
-        """Return fields as a dict.
-
-        Field names will be converted to use the underscore-lowercase style
-        instead of hyphen-mixed case (i.e. home_page instead of Home-page).
-        """
-        self.set_metadata_version()
-
-        mapping_1_0 = (
-            ('metadata_version', 'Metadata-Version'),
-            ('name', 'Name'),
-            ('version', 'Version'),
-            ('summary', 'Summary'),
-            ('home_page', 'Home-page'),
-            ('author', 'Author'),
-            ('author_email', 'Author-email'),
-            ('license', 'License'),
-            ('description', 'Description'),
-            ('keywords', 'Keywords'),
-            ('platform', 'Platform'),
-            ('classifiers', 'Classifier'),
-            ('download_url', 'Download-URL'),
-        )
-
-        data = {}
-        for key, field_name in mapping_1_0:
-            if not skip_missing or field_name in self._fields:
-                data[key] = self[field_name]
-
-        if self['Metadata-Version'] == '1.2':
-            mapping_1_2 = (
-                ('requires_dist', 'Requires-Dist'),
-                ('requires_python', 'Requires-Python'),
-                ('requires_external', 'Requires-External'),
-                ('provides_dist', 'Provides-Dist'),
-                ('obsoletes_dist', 'Obsoletes-Dist'),
-                ('project_url', 'Project-URL'),
-                ('maintainer', 'Maintainer'),
-                ('maintainer_email', 'Maintainer-email'),
-            )
-            for key, field_name in mapping_1_2:
-                if not skip_missing or field_name in self._fields:
-                    if key != 'project_url':
-                        data[key] = self[field_name]
-                    else:
-                        data[key] = [','.join(u) for u in self[field_name]]
-
-        elif self['Metadata-Version'] == '1.1':
-            mapping_1_1 = (
-                ('provides', 'Provides'),
-                ('requires', 'Requires'),
-                ('obsoletes', 'Obsoletes'),
-            )
-            for key, field_name in mapping_1_1:
-                if not skip_missing or field_name in self._fields:
-                    data[key] = self[field_name]
-
-        return data
-
-    def add_requirements(self, requirements):
-        if self['Metadata-Version'] == '1.1':
-            # we can't have 1.1 metadata *and* Setuptools requires
-            for field in ('Obsoletes', 'Requires', 'Provides'):
-                if field in self:
-                    del self[field]
-        self['Requires-Dist'] += requirements
-
-    # Mapping API
-    # TODO could add iter* variants
-
-    def keys(self):
-        return list(_version2fieldlist(self['Metadata-Version']))
-
-    def __iter__(self):
-        for key in self.keys():
-            yield key
-
-    def values(self):
-        return [self[key] for key in self.keys()]
-
-    def items(self):
-        return [(key, self[key]) for key in self.keys()]
-
-    def __repr__(self):
-        return '<%s %s %s>' % (self.__class__.__name__, self.name,
-                               self.version)
-
-
-METADATA_FILENAME = 'pydist.json'
-WHEEL_METADATA_FILENAME = 'metadata.json'
-LEGACY_METADATA_FILENAME = 'METADATA'
-
-
-class Metadata(object):
-    """
-    The metadata of a release. This implementation uses 2.0 (JSON)
-    metadata where possible. If not possible, it wraps a LegacyMetadata
-    instance which handles the key-value metadata format.
-    """
-
-    METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$')
-
-    NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I)
-
-    VERSION_MATCHER = PEP440_VERSION_RE
-
-    SUMMARY_MATCHER = re.compile('.{1,2047}')
-
-    METADATA_VERSION = '2.0'
-
-    GENERATOR = 'distlib (%s)' % __version__
-
-    MANDATORY_KEYS = {
-        'name': (),
-        'version': (),
-        'summary': ('legacy',),
-    }
-
-    INDEX_KEYS = ('name version license summary description author '
-                  'author_email keywords platform home_page classifiers '
-                  'download_url')
-
-    DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires '
-                       'dev_requires provides meta_requires obsoleted_by '
-                       'supports_environments')
-
-    SYNTAX_VALIDATORS = {
-        'metadata_version': (METADATA_VERSION_MATCHER, ()),
-        'name': (NAME_MATCHER, ('legacy',)),
-        'version': (VERSION_MATCHER, ('legacy',)),
-        'summary': (SUMMARY_MATCHER, ('legacy',)),
-    }
-
-    __slots__ = ('_legacy', '_data', 'scheme')
-
-    def __init__(self, path=None, fileobj=None, mapping=None,
-                 scheme='default'):
-        if [path, fileobj, mapping].count(None) < 2:
-            raise TypeError('path, fileobj and mapping are exclusive')
-        self._legacy = None
-        self._data = None
-        self.scheme = scheme
-        #import pdb; pdb.set_trace()
-        if mapping is not None:
-            try:
-                self._validate_mapping(mapping, scheme)
-                self._data = mapping
-            except MetadataUnrecognizedVersionError:
-                self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme)
-                self.validate()
-        else:
-            data = None
-            if path:
-                with open(path, 'rb') as f:
-                    data = f.read()
-            elif fileobj:
-                data = fileobj.read()
-            if data is None:
-                # Initialised with no args - to be added
-                self._data = {
-                    'metadata_version': self.METADATA_VERSION,
-                    'generator': self.GENERATOR,
-                }
-            else:
-                if not isinstance(data, text_type):
-                    data = data.decode('utf-8')
-                try:
-                    self._data = json.loads(data)
-                    self._validate_mapping(self._data, scheme)
-                except ValueError:
-                    # Note: MetadataUnrecognizedVersionError does not
-                    # inherit from ValueError (it's a DistlibException,
-                    # which should not inherit from ValueError).
-                    # The ValueError comes from the json.load - if that
-                    # succeeds and we get a validation error, we want
-                    # that to propagate
-                    self._legacy = LegacyMetadata(fileobj=StringIO(data),
-                                                  scheme=scheme)
-                    self.validate()
-
-    common_keys = set(('name', 'version', 'license', 'keywords', 'summary'))
-
-    none_list = (None, list)
-    none_dict = (None, dict)
-
-    mapped_keys = {
-        'run_requires': ('Requires-Dist', list),
-        'build_requires': ('Setup-Requires-Dist', list),
-        'dev_requires': none_list,
-        'test_requires': none_list,
-        'meta_requires': none_list,
-        'extras': ('Provides-Extra', list),
-        'modules': none_list,
-        'namespaces': none_list,
-        'exports': none_dict,
-        'commands': none_dict,
-        'classifiers': ('Classifier', list),
-        'source_url': ('Download-URL', None),
-        'metadata_version': ('Metadata-Version', None),
-    }
-
-    del none_list, none_dict
-
-    def __getattribute__(self, key):
-        common = object.__getattribute__(self, 'common_keys')
-        mapped = object.__getattribute__(self, 'mapped_keys')
-        if key in mapped:
-            lk, maker = mapped[key]
-            if self._legacy:
-                if lk is None:
-                    result = None if maker is None else maker()
-                else:
-                    result = self._legacy.get(lk)
-            else:
-                value = None if maker is None else maker()
-                if key not in ('commands', 'exports', 'modules', 'namespaces',
-                               'classifiers'):
-                    result = self._data.get(key, value)
-                else:
-                    # special cases for PEP 459
-                    sentinel = object()
-                    result = sentinel
-                    d = self._data.get('extensions')
-                    if d:
-                        if key == 'commands':
-                            result = d.get('python.commands', value)
-                        elif key == 'classifiers':
-                            d = d.get('python.details')
-                            if d:
-                                result = d.get(key, value)
-                        else:
-                            d = d.get('python.exports')
-                            if not d:
-                                d = self._data.get('python.exports')
-                            if d:
-                                result = d.get(key, value)
-                    if result is sentinel:
-                        result = value
-        elif key not in common:
-            result = object.__getattribute__(self, key)
-        elif self._legacy:
-            result = self._legacy.get(key)
-        else:
-            result = self._data.get(key)
-        return result
-
-    def _validate_value(self, key, value, scheme=None):
-        if key in self.SYNTAX_VALIDATORS:
-            pattern, exclusions = self.SYNTAX_VALIDATORS[key]
-            if (scheme or self.scheme) not in exclusions:
-                m = pattern.match(value)
-                if not m:
-                    raise MetadataInvalidError("'%s' is an invalid value for "
-                                               "the '%s' property" % (value,
-                                                                    key))
-
-    def __setattr__(self, key, value):
-        self._validate_value(key, value)
-        common = object.__getattribute__(self, 'common_keys')
-        mapped = object.__getattribute__(self, 'mapped_keys')
-        if key in mapped:
-            lk, _ = mapped[key]
-            if self._legacy:
-                if lk is None:
-                    raise NotImplementedError
-                self._legacy[lk] = value
-            elif key not in ('commands', 'exports', 'modules', 'namespaces',
-                             'classifiers'):
-                self._data[key] = value
-            else:
-                # special cases for PEP 459
-                d = self._data.setdefault('extensions', {})
-                if key == 'commands':
-                    d['python.commands'] = value
-                elif key == 'classifiers':
-                    d = d.setdefault('python.details', {})
-                    d[key] = value
-                else:
-                    d = d.setdefault('python.exports', {})
-                    d[key] = value
-        elif key not in common:
-            object.__setattr__(self, key, value)
-        else:
-            if key == 'keywords':
-                if isinstance(value, string_types):
-                    value = value.strip()
-                    if value:
-                        value = value.split()
-                    else:
-                        value = []
-            if self._legacy:
-                self._legacy[key] = value
-            else:
-                self._data[key] = value
-
-    @property
-    def name_and_version(self):
-        return _get_name_and_version(self.name, self.version, True)
-
-    @property
-    def provides(self):
-        if self._legacy:
-            result = self._legacy['Provides-Dist']
-        else:
-            result = self._data.setdefault('provides', [])
-        s = '%s (%s)' % (self.name, self.version)
-        if s not in result:
-            result.append(s)
-        return result
-
-    @provides.setter
-    def provides(self, value):
-        if self._legacy:
-            self._legacy['Provides-Dist'] = value
-        else:
-            self._data['provides'] = value
-
-    def get_requirements(self, reqts, extras=None, env=None):
-        """
-        Base method to get dependencies, given a set of extras
-        to satisfy and an optional environment context.
-        :param reqts: A list of sometimes-wanted dependencies,
-                      perhaps dependent on extras and environment.
-        :param extras: A list of optional components being requested.
-        :param env: An optional environment for marker evaluation.
-        """
-        if self._legacy:
-            result = reqts
-        else:
-            result = []
-            extras = get_extras(extras or [], self.extras)
-            for d in reqts:
-                if 'extra' not in d and 'environment' not in d:
-                    # unconditional
-                    include = True
-                else:
-                    if 'extra' not in d:
-                        # Not extra-dependent - only environment-dependent
-                        include = True
-                    else:
-                        include = d.get('extra') in extras
-                    if include:
-                        # Not excluded because of extras, check environment
-                        marker = d.get('environment')
-                        if marker:
-                            include = interpret(marker, env)
-                if include:
-                    result.extend(d['requires'])
-            for key in ('build', 'dev', 'test'):
-                e = ':%s:' % key
-                if e in extras:
-                    extras.remove(e)
-                    # A recursive call, but it should terminate since 'test'
-                    # has been removed from the extras
-                    reqts = self._data.get('%s_requires' % key, [])
-                    result.extend(self.get_requirements(reqts, extras=extras,
-                                                        env=env))
-        return result
-
-    @property
-    def dictionary(self):
-        if self._legacy:
-            return self._from_legacy()
-        return self._data
-
-    @property
-    def dependencies(self):
-        if self._legacy:
-            raise NotImplementedError
-        else:
-            return extract_by_key(self._data, self.DEPENDENCY_KEYS)
-
-    @dependencies.setter
-    def dependencies(self, value):
-        if self._legacy:
-            raise NotImplementedError
-        else:
-            self._data.update(value)
-
-    def _validate_mapping(self, mapping, scheme):
-        if mapping.get('metadata_version') != self.METADATA_VERSION:
-            raise MetadataUnrecognizedVersionError()
-        missing = []
-        for key, exclusions in self.MANDATORY_KEYS.items():
-            if key not in mapping:
-                if scheme not in exclusions:
-                    missing.append(key)
-        if missing:
-            msg = 'Missing metadata items: %s' % ', '.join(missing)
-            raise MetadataMissingError(msg)
-        for k, v in mapping.items():
-            self._validate_value(k, v, scheme)
-
-    def validate(self):
-        if self._legacy:
-            missing, warnings = self._legacy.check(True)
-            if missing or warnings:
-                logger.warning('Metadata: missing: %s, warnings: %s',
-                               missing, warnings)
-        else:
-            self._validate_mapping(self._data, self.scheme)
-
-    def todict(self):
-        if self._legacy:
-            return self._legacy.todict(True)
-        else:
-            result = extract_by_key(self._data, self.INDEX_KEYS)
-            return result
-
-    def _from_legacy(self):
-        assert self._legacy and not self._data
-        result = {
-            'metadata_version': self.METADATA_VERSION,
-            'generator': self.GENERATOR,
-        }
-        lmd = self._legacy.todict(True)     # skip missing ones
-        for k in ('name', 'version', 'license', 'summary', 'description',
-                  'classifier'):
-            if k in lmd:
-                if k == 'classifier':
-                    nk = 'classifiers'
-                else:
-                    nk = k
-                result[nk] = lmd[k]
-        kw = lmd.get('Keywords', [])
-        if kw == ['']:
-            kw = []
-        result['keywords'] = kw
-        keys = (('requires_dist', 'run_requires'),
-                ('setup_requires_dist', 'build_requires'))
-        for ok, nk in keys:
-            if ok in lmd and lmd[ok]:
-                result[nk] = [{'requires': lmd[ok]}]
-        result['provides'] = self.provides
-        author = {}
-        maintainer = {}
-        return result
-
-    LEGACY_MAPPING = {
-        'name': 'Name',
-        'version': 'Version',
-        'license': 'License',
-        'summary': 'Summary',
-        'description': 'Description',
-        'classifiers': 'Classifier',
-    }
-
-    def _to_legacy(self):
-        def process_entries(entries):
-            reqts = set()
-            for e in entries:
-                extra = e.get('extra')
-                env = e.get('environment')
-                rlist = e['requires']
-                for r in rlist:
-                    if not env and not extra:
-                        reqts.add(r)
-                    else:
-                        marker = ''
-                        if extra:
-                            marker = 'extra == "%s"' % extra
-                        if env:
-                            if marker:
-                                marker = '(%s) and %s' % (env, marker)
-                            else:
-                                marker = env
-                        reqts.add(';'.join((r, marker)))
-            return reqts
-
-        assert self._data and not self._legacy
-        result = LegacyMetadata()
-        nmd = self._data
-        for nk, ok in self.LEGACY_MAPPING.items():
-            if nk in nmd:
-                result[ok] = nmd[nk]
-        r1 = process_entries(self.run_requires + self.meta_requires)
-        r2 = process_entries(self.build_requires + self.dev_requires)
-        if self.extras:
-            result['Provides-Extra'] = sorted(self.extras)
-        result['Requires-Dist'] = sorted(r1)
-        result['Setup-Requires-Dist'] = sorted(r2)
-        # TODO: other fields such as contacts
-        return result
-
-    def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True):
-        if [path, fileobj].count(None) != 1:
-            raise ValueError('Exactly one of path and fileobj is needed')
-        self.validate()
-        if legacy:
-            if self._legacy:
-                legacy_md = self._legacy
-            else:
-                legacy_md = self._to_legacy()
-            if path:
-                legacy_md.write(path, skip_unknown=skip_unknown)
-            else:
-                legacy_md.write_file(fileobj, skip_unknown=skip_unknown)
-        else:
-            if self._legacy:
-                d = self._from_legacy()
-            else:
-                d = self._data
-            if fileobj:
-                json.dump(d, fileobj, ensure_ascii=True, indent=2,
-                          sort_keys=True)
-            else:
-                with codecs.open(path, 'w', 'utf-8') as f:
-                    json.dump(d, f, ensure_ascii=True, indent=2,
-                              sort_keys=True)
-
-    def add_requirements(self, requirements):
-        if self._legacy:
-            self._legacy.add_requirements(requirements)
-        else:
-            run_requires = self._data.setdefault('run_requires', [])
-            always = None
-            for entry in run_requires:
-                if 'environment' not in entry and 'extra' not in entry:
-                    always = entry
-                    break
-            if always is None:
-                always = { 'requires': requirements }
-                run_requires.insert(0, always)
-            else:
-                rset = set(always['requires']) | set(requirements)
-                always['requires'] = sorted(rset)
-
-    def __repr__(self):
-        name = self.name or '(no name)'
-        version = self.version or 'no version'
-        return '<%s %s %s (%s)>' % (self.__class__.__name__,
-                                    self.metadata_version, name, version)
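
For reference, a minimal sketch of driving the Metadata class deleted above, assuming the standalone distlib distribution; the project name, version, summary and requirement string are hypothetical. A mapping that validates as 2.0 metadata is kept as JSON-style data; anything else is wrapped in LegacyMetadata.

from distlib.metadata import Metadata

# Build 2.0 (JSON) metadata from a mapping; name, version and summary are
# the mandatory keys for the default scheme.
md = Metadata(mapping={
    'metadata_version': '2.0',
    'name': 'example-dist',            # hypothetical project
    'version': '0.1.0',
    'summary': 'An example distribution',
})

# Unconditional requirements are merged into the 'run_requires' entry.
md.add_requirements(['requests (>=2.0)'])

# Serialise as pydist.json-style JSON; pass legacy=True for PKG-INFO output.
md.write(path='pydist.json')
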
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/resources.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/resources.py
deleted file mode 100644
index 1884016..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/resources.py
+++ /dev/null
@@ -1,355 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2013-2017 Vinay Sajip.
-# Licensed to the Python Software Foundation under a contributor agreement.
-# See LICENSE.txt and CONTRIBUTORS.txt.
-#
-from __future__ import unicode_literals
-
-import bisect
-import io
-import logging
-import os
-import pkgutil
-import shutil
-import sys
-import types
-import zipimport
-
-from . import DistlibException
-from .util import cached_property, get_cache_base, path_to_cache_dir, Cache
-
-logger = logging.getLogger(__name__)
-
-
-cache = None    # created when needed
-
-
-class ResourceCache(Cache):
-    def __init__(self, base=None):
-        if base is None:
-            # Use native string to avoid issues on 2.x: see Python #20140.
-            base = os.path.join(get_cache_base(), str('resource-cache'))
-        super(ResourceCache, self).__init__(base)
-
-    def is_stale(self, resource, path):
-        """
-        Is the cache stale for the given resource?
-
-        :param resource: The :class:`Resource` being cached.
-        :param path: The path of the resource in the cache.
-        :return: True if the cache is stale.
-        """
-        # Cache invalidation is a hard problem :-)
-        return True
-
-    def get(self, resource):
-        """
-        Get a resource into the cache.
-
-        :param resource: A :class:`Resource` instance.
-        :return: The pathname of the resource in the cache.
-        """
-        prefix, path = resource.finder.get_cache_info(resource)
-        if prefix is None:
-            result = path
-        else:
-            result = os.path.join(self.base, self.prefix_to_dir(prefix), path)
-            dirname = os.path.dirname(result)
-            if not os.path.isdir(dirname):
-                os.makedirs(dirname)
-            if not os.path.exists(result):
-                stale = True
-            else:
-                stale = self.is_stale(resource, path)
-            if stale:
-                # write the bytes of the resource to the cache location
-                with open(result, 'wb') as f:
-                    f.write(resource.bytes)
-        return result
-
-
-class ResourceBase(object):
-    def __init__(self, finder, name):
-        self.finder = finder
-        self.name = name
-
-
-class Resource(ResourceBase):
-    """
-    A class representing an in-package resource, such as a data file. This is
-    not normally instantiated by user code, but rather by a
-    :class:`ResourceFinder` which manages the resource.
-    """
-    is_container = False        # Backwards compatibility
-
-    def as_stream(self):
-        """
-        Get the resource as a stream.
-
-        This is not a property to make it obvious that it returns a new stream
-        each time.
-        """
-        return self.finder.get_stream(self)
-
-    @cached_property
-    def file_path(self):
-        global cache
-        if cache is None:
-            cache = ResourceCache()
-        return cache.get(self)
-
-    @cached_property
-    def bytes(self):
-        return self.finder.get_bytes(self)
-
-    @cached_property
-    def size(self):
-        return self.finder.get_size(self)
-
-
-class ResourceContainer(ResourceBase):
-    is_container = True     # Backwards compatibility
-
-    @cached_property
-    def resources(self):
-        return self.finder.get_resources(self)
-
-
-class ResourceFinder(object):
-    """
-    Resource finder for file system resources.
-    """
-
-    if sys.platform.startswith('java'):
-        skipped_extensions = ('.pyc', '.pyo', '.class')
-    else:
-        skipped_extensions = ('.pyc', '.pyo')
-
-    def __init__(self, module):
-        self.module = module
-        self.loader = getattr(module, '__loader__', None)
-        self.base = os.path.dirname(getattr(module, '__file__', ''))
-
-    def _adjust_path(self, path):
-        return os.path.realpath(path)
-
-    def _make_path(self, resource_name):
-        # Issue #50: need to preserve type of path on Python 2.x
-        # like os.path._get_sep
-        if isinstance(resource_name, bytes):    # should only happen on 2.x
-            sep = b'/'
-        else:
-            sep = '/'
-        parts = resource_name.split(sep)
-        parts.insert(0, self.base)
-        result = os.path.join(*parts)
-        return self._adjust_path(result)
-
-    def _find(self, path):
-        return os.path.exists(path)
-
-    def get_cache_info(self, resource):
-        return None, resource.path
-
-    def find(self, resource_name):
-        path = self._make_path(resource_name)
-        if not self._find(path):
-            result = None
-        else:
-            if self._is_directory(path):
-                result = ResourceContainer(self, resource_name)
-            else:
-                result = Resource(self, resource_name)
-            result.path = path
-        return result
-
-    def get_stream(self, resource):
-        return open(resource.path, 'rb')
-
-    def get_bytes(self, resource):
-        with open(resource.path, 'rb') as f:
-            return f.read()
-
-    def get_size(self, resource):
-        return os.path.getsize(resource.path)
-
-    def get_resources(self, resource):
-        def allowed(f):
-            return (f != '__pycache__' and not
-                    f.endswith(self.skipped_extensions))
-        return set([f for f in os.listdir(resource.path) if allowed(f)])
-
-    def is_container(self, resource):
-        return self._is_directory(resource.path)
-
-    _is_directory = staticmethod(os.path.isdir)
-
-    def iterator(self, resource_name):
-        resource = self.find(resource_name)
-        if resource is not None:
-            todo = [resource]
-            while todo:
-                resource = todo.pop(0)
-                yield resource
-                if resource.is_container:
-                    rname = resource.name
-                    for name in resource.resources:
-                        if not rname:
-                            new_name = name
-                        else:
-                            new_name = '/'.join([rname, name])
-                        child = self.find(new_name)
-                        if child.is_container:
-                            todo.append(child)
-                        else:
-                            yield child
-
-
-class ZipResourceFinder(ResourceFinder):
-    """
-    Resource finder for resources in .zip files.
-    """
-    def __init__(self, module):
-        super(ZipResourceFinder, self).__init__(module)
-        archive = self.loader.archive
-        self.prefix_len = 1 + len(archive)
-        # PyPy doesn't have a _files attr on zipimporter, and you can't set one
-        if hasattr(self.loader, '_files'):
-            self._files = self.loader._files
-        else:
-            self._files = zipimport._zip_directory_cache[archive]
-        self.index = sorted(self._files)
-
-    def _adjust_path(self, path):
-        return path
-
-    def _find(self, path):
-        path = path[self.prefix_len:]
-        if path in self._files:
-            result = True
-        else:
-            if path and path[-1] != os.sep:
-                path = path + os.sep
-            i = bisect.bisect(self.index, path)
-            try:
-                result = self.index[i].startswith(path)
-            except IndexError:
-                result = False
-        if not result:
-            logger.debug('_find failed: %r %r', path, self.loader.prefix)
-        else:
-            logger.debug('_find worked: %r %r', path, self.loader.prefix)
-        return result
-
-    def get_cache_info(self, resource):
-        prefix = self.loader.archive
-        path = resource.path[1 + len(prefix):]
-        return prefix, path
-
-    def get_bytes(self, resource):
-        return self.loader.get_data(resource.path)
-
-    def get_stream(self, resource):
-        return io.BytesIO(self.get_bytes(resource))
-
-    def get_size(self, resource):
-        path = resource.path[self.prefix_len:]
-        return self._files[path][3]
-
-    def get_resources(self, resource):
-        path = resource.path[self.prefix_len:]
-        if path and path[-1] != os.sep:
-            path += os.sep
-        plen = len(path)
-        result = set()
-        i = bisect.bisect(self.index, path)
-        while i < len(self.index):
-            if not self.index[i].startswith(path):
-                break
-            s = self.index[i][plen:]
-            result.add(s.split(os.sep, 1)[0])   # only immediate children
-            i += 1
-        return result
-
-    def _is_directory(self, path):
-        path = path[self.prefix_len:]
-        if path and path[-1] != os.sep:
-            path += os.sep
-        i = bisect.bisect(self.index, path)
-        try:
-            result = self.index[i].startswith(path)
-        except IndexError:
-            result = False
-        return result
-
-_finder_registry = {
-    type(None): ResourceFinder,
-    zipimport.zipimporter: ZipResourceFinder
-}
-
-try:
-    # In Python 3.6, _frozen_importlib -> _frozen_importlib_external
-    try:
-        import _frozen_importlib_external as _fi
-    except ImportError:
-        import _frozen_importlib as _fi
-    _finder_registry[_fi.SourceFileLoader] = ResourceFinder
-    _finder_registry[_fi.FileFinder] = ResourceFinder
-    del _fi
-except (ImportError, AttributeError):
-    pass
-
-
-def register_finder(loader, finder_maker):
-    _finder_registry[type(loader)] = finder_maker
-
-_finder_cache = {}
-
-
-def finder(package):
-    """
-    Return a resource finder for a package.
-    :param package: The name of the package.
-    :return: A :class:`ResourceFinder` instance for the package.
-    """
-    if package in _finder_cache:
-        result = _finder_cache[package]
-    else:
-        if package not in sys.modules:
-            __import__(package)
-        module = sys.modules[package]
-        path = getattr(module, '__path__', None)
-        if path is None:
-            raise DistlibException('You cannot get a finder for a module, '
-                                   'only for a package')
-        loader = getattr(module, '__loader__', None)
-        finder_maker = _finder_registry.get(type(loader))
-        if finder_maker is None:
-            raise DistlibException('Unable to locate finder for %r' % package)
-        result = finder_maker(module)
-        _finder_cache[package] = result
-    return result
-
-
-_dummy_module = types.ModuleType(str('__dummy__'))
-
-
-def finder_for_path(path):
-    """
-    Return a resource finder for a path, which should represent a container.
-
-    :param path: The path.
-    :return: A :class:`ResourceFinder` instance for the path.
-    """
-    result = None
-    # calls any path hooks, gets importer into cache
-    pkgutil.get_importer(path)
-    loader = sys.path_importer_cache.get(path)
-    finder = _finder_registry.get(type(loader))
-    if finder:
-        module = _dummy_module
-        module.__file__ = os.path.join(path, '')
-        module.__loader__ = loader
-        result = finder(module)
-    return result
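
For reference, a minimal sketch of using the resource API deleted above, assuming the standalone distlib distribution; the package and resource names are hypothetical. finder() returns a ResourceFinder (or ZipResourceFinder for zipped packages), and find() yields a Resource, a ResourceContainer, or None.

from distlib.resources import finder

f = finder('mypackage')                 # hypothetical installed package
res = f.find('data/config.json')        # hypothetical resource path
if res is not None and not res.is_container:
    print(res.size)                     # size in bytes, via get_size()
    with res.as_stream() as stream:     # a new binary stream on each call
        payload = stream.read()
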
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/scripts.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/scripts.py
deleted file mode 100644
index 5965e24..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/scripts.py
+++ /dev/null
@@ -1,403 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2013-2015 Vinay Sajip.
-# Licensed to the Python Software Foundation under a contributor agreement.
-# See LICENSE.txt and CONTRIBUTORS.txt.
-#
-from io import BytesIO
-import logging
-import os
-import re
-import struct
-import sys
-
-from .compat import sysconfig, detect_encoding, ZipFile
-from .resources import finder
-from .util import (FileOperator, get_export_entry, convert_path,
-                   get_executable, in_venv)
-
-logger = logging.getLogger(__name__)
-
-_DEFAULT_MANIFEST = '''
-<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
- manifestVersion="1.0">
- <assemblyIdentity version="1.0.0.0"
- processorArchitecture="X86"
- name="%s"
- type="win32"/>
-
- <!-- Identify the application security requirements. -->
- <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
- <security>
- <requestedPrivileges>
- <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
- </requestedPrivileges>
- </security>
- </trustInfo>
-</assembly>'''.strip()
-
-# check if Python is called on the first line with this expression
-FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
-SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*-
-import re
-import sys
-from %(module)s import %(import_name)s
-if __name__ == '__main__':
-    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
-    sys.exit(%(func)s())
-'''
-
-
-def _enquote_executable(executable):
-    if ' ' in executable:
-        # make sure we quote only the executable in case of env
-        # for example /usr/bin/env "/dir with spaces/bin/jython"
-        # instead of "/usr/bin/env /dir with spaces/bin/jython"
-        # otherwise the whole command line would be quoted
-        if executable.startswith('/usr/bin/env '):
-            env, _executable = executable.split(' ', 1)
-            if ' ' in _executable and not _executable.startswith('"'):
-                executable = '%s "%s"' % (env, _executable)
-        else:
-            if not executable.startswith('"'):
-                executable = '"%s"' % executable
-    return executable
-
-
-class ScriptMaker(object):
-    """
-    A class to copy or create scripts from source scripts or callable
-    specifications.
-    """
-    script_template = SCRIPT_TEMPLATE
-
-    executable = None  # for shebangs
-
-    def __init__(self, source_dir, target_dir, add_launchers=True,
-                 dry_run=False, fileop=None):
-        self.source_dir = source_dir
-        self.target_dir = target_dir
-        self.add_launchers = add_launchers
-        self.force = False
-        self.clobber = False
-        # It only makes sense to set mode bits on POSIX.
-        self.set_mode = (os.name == 'posix') or (os.name == 'java' and
-                                                 os._name == 'posix')
-        self.variants = set(('', 'X.Y'))
-        self._fileop = fileop or FileOperator(dry_run)
-
-        self._is_nt = os.name == 'nt' or (
-            os.name == 'java' and os._name == 'nt')
-
-    def _get_alternate_executable(self, executable, options):
-        if options.get('gui', False) and self._is_nt:  # pragma: no cover
-            dn, fn = os.path.split(executable)
-            fn = fn.replace('python', 'pythonw')
-            executable = os.path.join(dn, fn)
-        return executable
-
-    if sys.platform.startswith('java'):  # pragma: no cover
-        def _is_shell(self, executable):
-            """
-            Determine if the specified executable is a script
-            (contains a #! line)
-            """
-            try:
-                with open(executable) as fp:
-                    return fp.read(2) == '#!'
-            except (OSError, IOError):
-                logger.warning('Failed to open %s', executable)
-                return False
-
-        def _fix_jython_executable(self, executable):
-            if self._is_shell(executable):
-                # Workaround for Jython is not needed on Linux systems.
-                import java
-
-                if java.lang.System.getProperty('os.name') == 'Linux':
-                    return executable
-            elif executable.lower().endswith('jython.exe'):
-                # Use wrapper exe for Jython on Windows
-                return executable
-            return '/usr/bin/env %s' % executable
-
-    def _build_shebang(self, executable, post_interp):
-        """
-        Build a shebang line. In the simple case (on Windows, or a shebang line
-        which is not too long and contains no spaces) use a simple formulation for
-        the shebang. Otherwise, use /bin/sh as the executable, with a contrived
-        shebang which allows the script to run either under Python or sh, using
-        suitable quoting. Thanks to Harald Nordgren for his input.
-
-        See also: http://www.in-ulm.de/~mascheck/various/shebang/#length
-                  https://hg.mozilla.org/mozilla-central/file/tip/mach
-        """
-        if os.name != 'posix':
-            simple_shebang = True
-        else:
-            # Add 3 for '#!' prefix and newline suffix.
-            shebang_length = len(executable) + len(post_interp) + 3
-            if sys.platform == 'darwin':
-                max_shebang_length = 512
-            else:
-                max_shebang_length = 127
-            simple_shebang = ((b' ' not in executable) and
-                              (shebang_length <= max_shebang_length))
-
-        if simple_shebang:
-            result = b'#!' + executable + post_interp + b'\n'
-        else:
-            result = b'#!/bin/sh\n'
-            result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n'
-            result += b"' '''"
-        return result
-
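For illustration only: when the simple form cannot be used, the method above emits a polyglot header along these lines (the interpreter path is hypothetical). /bin/sh executes the exec line, while Python sees it as part of a string literal:

    #!/bin/sh
    '''exec' /very/long/hypothetical/path/to/python3 "$0" "$@"
    ' '''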
-    def _get_shebang(self, encoding, post_interp=b'', options=None):
-        enquote = True
-        if self.executable:
-            executable = self.executable
-            enquote = False     # assume this will be taken care of
-        elif not sysconfig.is_python_build():
-            executable = get_executable()
-        elif in_venv():  # pragma: no cover
-            executable = os.path.join(sysconfig.get_path('scripts'),
-                            'python%s' % sysconfig.get_config_var('EXE'))
-        else:  # pragma: no cover
-            executable = os.path.join(
-                sysconfig.get_config_var('BINDIR'),
-               'python%s%s' % (sysconfig.get_config_var('VERSION'),
-                               sysconfig.get_config_var('EXE')))
-        if options:
-            executable = self._get_alternate_executable(executable, options)
-
-        if sys.platform.startswith('java'):  # pragma: no cover
-            executable = self._fix_jython_executable(executable)
-        # Normalise case for Windows
-        executable = os.path.normcase(executable)
-        # If the user didn't specify an executable, it may be necessary to
-        # cater for executable paths with spaces (not uncommon on Windows)
-        if enquote:
-            executable = _enquote_executable(executable)
-        # Issue #51: don't use fsencode, since we later try to
-        # check that the shebang is decodable using utf-8.
-        executable = executable.encode('utf-8')
-        # in case of IronPython, play safe and enable frames support
-        if (sys.platform == 'cli' and '-X:Frames' not in post_interp
-            and '-X:FullFrames' not in post_interp):  # pragma: no cover
-            post_interp += b' -X:Frames'
-        shebang = self._build_shebang(executable, post_interp)
-        # The Python parser reads a script as UTF-8 until it sees a
-        # #coding:xxx cookie. The shebang has to be the first line of the
-        # file, so the cookie cannot come before it, which means the
-        # shebang has to be decodable from UTF-8.
-        try:
-            shebang.decode('utf-8')
-        except UnicodeDecodeError:  # pragma: no cover
-            raise ValueError(
-                'The shebang (%r) is not decodable from utf-8' % shebang)
-        # If the script is encoded to a custom encoding (use a
-        # #coding:xxx cookie), the shebang has to be decodable from
-        # the script encoding too.
-        if encoding != 'utf-8':
-            try:
-                shebang.decode(encoding)
-            except UnicodeDecodeError:  # pragma: no cover
-                raise ValueError(
-                    'The shebang (%r) is not decodable '
-                    'from the script encoding (%r)' % (shebang, encoding))
-        return shebang
-
-    def _get_script_text(self, entry):
-        return self.script_template % dict(module=entry.prefix,
-                                           import_name=entry.suffix.split('.')[0],
-                                           func=entry.suffix)
-
-    manifest = _DEFAULT_MANIFEST
-
-    def get_manifest(self, exename):
-        base = os.path.basename(exename)
-        return self.manifest % base
-
-    def _write_script(self, names, shebang, script_bytes, filenames, ext):
-        use_launcher = self.add_launchers and self._is_nt
-        linesep = os.linesep.encode('utf-8')
-        if not shebang.endswith(linesep):
-            shebang += linesep
-        if not use_launcher:
-            script_bytes = shebang + script_bytes
-        else:  # pragma: no cover
-            if ext == 'py':
-                launcher = self._get_launcher('t')
-            else:
-                launcher = self._get_launcher('w')
-            stream = BytesIO()
-            with ZipFile(stream, 'w') as zf:
-                zf.writestr('__main__.py', script_bytes)
-            zip_data = stream.getvalue()
-            script_bytes = launcher + shebang + zip_data
-        for name in names:
-            outname = os.path.join(self.target_dir, name)
-            if use_launcher:  # pragma: no cover
-                n, e = os.path.splitext(outname)
-                if e.startswith('.py'):
-                    outname = n
-                outname = '%s.exe' % outname
-                try:
-                    self._fileop.write_binary_file(outname, script_bytes)
-                except Exception:
-                    # Failed writing an executable - it might be in use.
-                    logger.warning('Failed to write executable - trying to '
-                                   'use .deleteme logic')
-                    dfname = '%s.deleteme' % outname
-                    if os.path.exists(dfname):
-                        os.remove(dfname)       # Not allowed to fail here
-                    os.rename(outname, dfname)  # nor here
-                    self._fileop.write_binary_file(outname, script_bytes)
-                    logger.debug('Able to replace executable using '
-                                 '.deleteme logic')
-                    try:
-                        os.remove(dfname)
-                    except Exception:
-                        pass    # still in use - ignore error
-            else:
-                if self._is_nt and not outname.endswith('.' + ext):  # pragma: no cover
-                    outname = '%s.%s' % (outname, ext)
-                if os.path.exists(outname) and not self.clobber:
-                    logger.warning('Skipping existing file %s', outname)
-                    continue
-                self._fileop.write_binary_file(outname, script_bytes)
-                if self.set_mode:
-                    self._fileop.set_executable_mode([outname])
-            filenames.append(outname)
-
-    def _make_script(self, entry, filenames, options=None):
-        post_interp = b''
-        if options:
-            args = options.get('interpreter_args', [])
-            if args:
-                args = ' %s' % ' '.join(args)
-                post_interp = args.encode('utf-8')
-        shebang = self._get_shebang('utf-8', post_interp, options=options)
-        script = self._get_script_text(entry).encode('utf-8')
-        name = entry.name
-        scriptnames = set()
-        if '' in self.variants:
-            scriptnames.add(name)
-        if 'X' in self.variants:
-            scriptnames.add('%s%s' % (name, sys.version[0]))
-        if 'X.Y' in self.variants:
-            scriptnames.add('%s-%s' % (name, sys.version[:3]))
-        if options and options.get('gui', False):
-            ext = 'pyw'
-        else:
-            ext = 'py'
-        self._write_script(scriptnames, shebang, script, filenames, ext)
-
-    def _copy_script(self, script, filenames):
-        adjust = False
-        script = os.path.join(self.source_dir, convert_path(script))
-        outname = os.path.join(self.target_dir, os.path.basename(script))
-        if not self.force and not self._fileop.newer(script, outname):
-            logger.debug('not copying %s (up-to-date)', script)
-            return
-
-        # Always open the file, but ignore failures in dry-run mode --
-        # that way, we'll get accurate feedback if we can read the
-        # script.
-        try:
-            f = open(script, 'rb')
-        except IOError:  # pragma: no cover
-            if not self.dry_run:
-                raise
-            f = None
-        else:
-            first_line = f.readline()
-            if not first_line:  # pragma: no cover
-                logger.warning('%s: %s is an empty file (skipping)',
-                               self.get_command_name(),  script)
-                return
-
-            match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
-            if match:
-                adjust = True
-                post_interp = match.group(1) or b''
-
-        if not adjust:
-            if f:
-                f.close()
-            self._fileop.copy_file(script, outname)
-            if self.set_mode:
-                self._fileop.set_executable_mode([outname])
-            filenames.append(outname)
-        else:
-            logger.info('copying and adjusting %s -> %s', script,
-                        self.target_dir)
-            if not self._fileop.dry_run:
-                encoding, lines = detect_encoding(f.readline)
-                f.seek(0)
-                shebang = self._get_shebang(encoding, post_interp)
-                if b'pythonw' in first_line:  # pragma: no cover
-                    ext = 'pyw'
-                else:
-                    ext = 'py'
-                n = os.path.basename(outname)
-                self._write_script([n], shebang, f.read(), filenames, ext)
-            if f:
-                f.close()
-
-    @property
-    def dry_run(self):
-        return self._fileop.dry_run
-
-    @dry_run.setter
-    def dry_run(self, value):
-        self._fileop.dry_run = value
-
-    if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'):  # pragma: no cover
-        # Executable launcher support.
-        # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/
-
-        def _get_launcher(self, kind):
-            if struct.calcsize('P') == 8:   # 64-bit
-                bits = '64'
-            else:
-                bits = '32'
-            name = '%s%s.exe' % (kind, bits)
-            # Issue 31: don't hardcode an absolute package name, but
-            # determine it relative to the current package
-            distlib_package = __name__.rsplit('.', 1)[0]
-            result = finder(distlib_package).find(name).bytes
-            return result
-
-    # Public API follows
-
-    def make(self, specification, options=None):
-        """
-        Make a script.
-
-        :param specification: The specification, which is either a valid export
-                              entry specification (to make a script from a
-                              callable) or a filename (to make a script by
-                              copying from a source location).
-        :param options: A dictionary of options controlling script generation.
-        :return: A list of all absolute pathnames written to.
-        """
-        filenames = []
-        entry = get_export_entry(specification)
-        if entry is None:
-            self._copy_script(specification, filenames)
-        else:
-            self._make_script(entry, filenames, options=options)
-        return filenames
-
-    def make_multiple(self, specifications, options=None):
-        """
-        Take a list of specifications and make scripts from them.
-        :param specifications: A list of specifications.
-        :return: A list of all absolute pathnames written to.
-        """
-        filenames = []
-        for specification in specifications:
-            filenames.extend(self.make(specification, options))
-        return filenames
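A minimal usage sketch of the public ScriptMaker API above (the target directory and entry specification are hypothetical; assumes pip's vendored distlib, or the standalone distlib package, is importable):

    from pip._vendor.distlib.scripts import ScriptMaker

    maker = ScriptMaker(source_dir=None, target_dir='/tmp/bin', add_launchers=False)
    maker.variants = {''}                       # write only 'foo', not 'foo-3.7'
    written = maker.make('foo = foo.cli:main')  # callable spec -> generated wrapper script
    print(written)                              # e.g. ['/tmp/bin/foo']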
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/t32.exe b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/t32.exe
deleted file mode 100644
index 5d5bce1..0000000
Binary files a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/t32.exe and /dev/null differ
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/t64.exe b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/t64.exe
deleted file mode 100644
index 039ce44..0000000
Binary files a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/t64.exe and /dev/null differ
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/util.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/util.py
deleted file mode 100644
index e851146..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/util.py
+++ /dev/null
@@ -1,1760 +0,0 @@
-#
-# Copyright (C) 2012-2017 The Python Software Foundation.
-# See LICENSE.txt and CONTRIBUTORS.txt.
-#
-import codecs
-from collections import deque
-import contextlib
-import csv
-from glob import iglob as std_iglob
-import io
-import json
-import logging
-import os
-import py_compile
-import re
-import socket
-try:
-    import ssl
-except ImportError:  # pragma: no cover
-    ssl = None
-import subprocess
-import sys
-import tarfile
-import tempfile
-import textwrap
-
-try:
-    import threading
-except ImportError:  # pragma: no cover
-    import dummy_threading as threading
-import time
-
-from . import DistlibException
-from .compat import (string_types, text_type, shutil, raw_input, StringIO,
-                     cache_from_source, urlopen, urljoin, httplib, xmlrpclib,
-                     splittype, HTTPHandler, BaseConfigurator, valid_ident,
-                     Container, configparser, URLError, ZipFile, fsdecode,
-                     unquote, urlparse)
-
-logger = logging.getLogger(__name__)
-
-#
-# Requirement parsing code as per PEP 508
-#
-
-IDENTIFIER = re.compile(r'^([\w\.-]+)\s*')
-VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*')
-COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*')
-MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*')
-OR = re.compile(r'^or\b\s*')
-AND = re.compile(r'^and\b\s*')
-NON_SPACE = re.compile(r'(\S+)\s*')
-STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)')
-
-
-def parse_marker(marker_string):
-    """
-    Parse a marker string and return a dictionary containing a marker expression.
-
-    The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in
-    the expression grammar, or strings. A string contained in quotes is to be
-    interpreted as a literal string, and a string not contained in quotes is a
-    variable (such as os_name).
-    """
-    def marker_var(remaining):
-        # either identifier, or literal string
-        m = IDENTIFIER.match(remaining)
-        if m:
-            result = m.groups()[0]
-            remaining = remaining[m.end():]
-        elif not remaining:
-            raise SyntaxError('unexpected end of input')
-        else:
-            q = remaining[0]
-            if q not in '\'"':
-                raise SyntaxError('invalid expression: %s' % remaining)
-            oq = '\'"'.replace(q, '')
-            remaining = remaining[1:]
-            parts = [q]
-            while remaining:
-                # either a string chunk, or oq, or q to terminate
-                if remaining[0] == q:
-                    break
-                elif remaining[0] == oq:
-                    parts.append(oq)
-                    remaining = remaining[1:]
-                else:
-                    m = STRING_CHUNK.match(remaining)
-                    if not m:
-                        raise SyntaxError('error in string literal: %s' % remaining)
-                    parts.append(m.groups()[0])
-                    remaining = remaining[m.end():]
-            else:
-                s = ''.join(parts)
-                raise SyntaxError('unterminated string: %s' % s)
-            parts.append(q)
-            result = ''.join(parts)
-            remaining = remaining[1:].lstrip() # skip past closing quote
-        return result, remaining
-
-    def marker_expr(remaining):
-        if remaining and remaining[0] == '(':
-            result, remaining = marker(remaining[1:].lstrip())
-            if remaining[0] != ')':
-                raise SyntaxError('unterminated parenthesis: %s' % remaining)
-            remaining = remaining[1:].lstrip()
-        else:
-            lhs, remaining = marker_var(remaining)
-            while remaining:
-                m = MARKER_OP.match(remaining)
-                if not m:
-                    break
-                op = m.groups()[0]
-                remaining = remaining[m.end():]
-                rhs, remaining = marker_var(remaining)
-                lhs = {'op': op, 'lhs': lhs, 'rhs': rhs}
-            result = lhs
-        return result, remaining
-
-    def marker_and(remaining):
-        lhs, remaining = marker_expr(remaining)
-        while remaining:
-            m = AND.match(remaining)
-            if not m:
-                break
-            remaining = remaining[m.end():]
-            rhs, remaining = marker_expr(remaining)
-            lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs}
-        return lhs, remaining
-
-    def marker(remaining):
-        lhs, remaining = marker_and(remaining)
-        while remaining:
-            m = OR.match(remaining)
-            if not m:
-                break
-            remaining = remaining[m.end():]
-            rhs, remaining = marker_and(remaining)
-            lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs}
-        return lhs, remaining
-
-    return marker(marker_string)
-
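A small sketch of the nested dict structure that parse_marker produces (marker string chosen for illustration):

    from pip._vendor.distlib.util import parse_marker

    tree, rest = parse_marker('python_version >= "3.6" and os_name == "posix"')
    # rest == ''
    # tree == {'op': 'and',
    #          'lhs': {'op': '>=', 'lhs': 'python_version', 'rhs': '"3.6"'},
    #          'rhs': {'op': '==', 'lhs': 'os_name', 'rhs': '"posix"'}}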
-
-def parse_requirement(req):
-    """
-    Parse a requirement passed in as a string. Return a Container
-    whose attributes contain the various parts of the requirement.
-    """
-    remaining = req.strip()
-    if not remaining or remaining.startswith('#'):
-        return None
-    m = IDENTIFIER.match(remaining)
-    if not m:
-        raise SyntaxError('name expected: %s' % remaining)
-    distname = m.groups()[0]
-    remaining = remaining[m.end():]
-    extras = mark_expr = versions = uri = None
-    if remaining and remaining[0] == '[':
-        i = remaining.find(']', 1)
-        if i < 0:
-            raise SyntaxError('unterminated extra: %s' % remaining)
-        s = remaining[1:i]
-        remaining = remaining[i + 1:].lstrip()
-        extras = []
-        while s:
-            m = IDENTIFIER.match(s)
-            if not m:
-                raise SyntaxError('malformed extra: %s' % s)
-            extras.append(m.groups()[0])
-            s = s[m.end():]
-            if not s:
-                break
-            if s[0] != ',':
-                raise SyntaxError('comma expected in extras: %s' % s)
-            s = s[1:].lstrip()
-        if not extras:
-            extras = None
-    if remaining:
-        if remaining[0] == '@':
-            # it's a URI
-            remaining = remaining[1:].lstrip()
-            m = NON_SPACE.match(remaining)
-            if not m:
-                raise SyntaxError('invalid URI: %s' % remaining)
-            uri = m.groups()[0]
-            t = urlparse(uri)
-            # there are issues with Python and URL parsing, so this test
-            # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
-            # always parse invalid URLs correctly - it should raise
-            # exceptions for malformed URLs
-            if not (t.scheme and t.netloc):
-                raise SyntaxError('Invalid URL: %s' % uri)
-            remaining = remaining[m.end():].lstrip()
-        else:
-
-            def get_versions(ver_remaining):
-                """
-                Return a list of operator, version tuples if any are
-                specified, else None.
-                """
-                m = COMPARE_OP.match(ver_remaining)
-                versions = None
-                if m:
-                    versions = []
-                    while True:
-                        op = m.groups()[0]
-                        ver_remaining = ver_remaining[m.end():]
-                        m = VERSION_IDENTIFIER.match(ver_remaining)
-                        if not m:
-                            raise SyntaxError('invalid version: %s' % ver_remaining)
-                        v = m.groups()[0]
-                        versions.append((op, v))
-                        ver_remaining = ver_remaining[m.end():]
-                        if not ver_remaining or ver_remaining[0] != ',':
-                            break
-                        ver_remaining = ver_remaining[1:].lstrip()
-                        m = COMPARE_OP.match(ver_remaining)
-                        if not m:
-                            raise SyntaxError('invalid constraint: %s' % ver_remaining)
-                    if not versions:
-                        versions = None
-                return versions, ver_remaining
-
-            if remaining[0] != '(':
-                versions, remaining = get_versions(remaining)
-            else:
-                i = remaining.find(')', 1)
-                if i < 0:
-                    raise SyntaxError('unterminated parenthesis: %s' % remaining)
-                s = remaining[1:i]
-                remaining = remaining[i + 1:].lstrip()
-                # As a special diversion from PEP 508, allow a version number
-                # a.b.c in parentheses as a synonym for ~= a.b.c (because this
-                # is allowed in earlier PEPs)
-                if COMPARE_OP.match(s):
-                    versions, _ = get_versions(s)
-                else:
-                    m = VERSION_IDENTIFIER.match(s)
-                    if not m:
-                        raise SyntaxError('invalid constraint: %s' % s)
-                    v = m.groups()[0]
-                    s = s[m.end():].lstrip()
-                    if s:
-                        raise SyntaxError('invalid constraint: %s' % s)
-                    versions = [('~=', v)]
-
-    if remaining:
-        if remaining[0] != ';':
-            raise SyntaxError('invalid requirement: %s' % remaining)
-        remaining = remaining[1:].lstrip()
-
-        mark_expr, remaining = parse_marker(remaining)
-
-    if remaining and remaining[0] != '#':
-        raise SyntaxError('unexpected trailing data: %s' % remaining)
-
-    if not versions:
-        rs = distname
-    else:
-        rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions]))
-    return Container(name=distname, extras=extras, constraints=versions,
-                     marker=mark_expr, url=uri, requirement=rs)
-
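For example, a requirement with extras, a parenthesised version constraint and a marker parses roughly as follows (the input is hypothetical; the commented values are what the code above produces for it):

    from pip._vendor.distlib.util import parse_requirement

    r = parse_requirement('requests[security] (>=2.8.1, ==2.8.*); python_version < "2.7"')
    # r.name        == 'requests'
    # r.extras      == ['security']
    # r.constraints == [('>=', '2.8.1'), ('==', '2.8.*')]
    # r.marker      == {'op': '<', 'lhs': 'python_version', 'rhs': '"2.7"'}
    # r.requirement == 'requests >= 2.8.1, == 2.8.*'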
-
-def get_resources_dests(resources_root, rules):
-    """Find destinations for resources files"""
-
-    def get_rel_path(root, path):
-        # normalizes and returns a lstripped-/-separated path
-        root = root.replace(os.path.sep, '/')
-        path = path.replace(os.path.sep, '/')
-        assert path.startswith(root)
-        return path[len(root):].lstrip('/')
-
-    destinations = {}
-    for base, suffix, dest in rules:
-        prefix = os.path.join(resources_root, base)
-        for abs_base in iglob(prefix):
-            abs_glob = os.path.join(abs_base, suffix)
-            for abs_path in iglob(abs_glob):
-                resource_file = get_rel_path(resources_root, abs_path)
-                if dest is None:  # remove the entry if it was here
-                    destinations.pop(resource_file, None)
-                else:
-                    rel_path = get_rel_path(abs_base, abs_path)
-                    rel_dest = dest.replace(os.path.sep, '/').rstrip('/')
-                    destinations[resource_file] = rel_dest + '/' + rel_path
-    return destinations
-
-
-def in_venv():
-    if hasattr(sys, 'real_prefix'):
-        # virtualenv venvs
-        result = True
-    else:
-        # PEP 405 venvs
-        result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix)
-    return result
-
-
-def get_executable():
-# The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as
-# changes to the stub launcher mean that sys.executable always points
-# to the stub on OS X
-#    if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__'
-#                                     in os.environ):
-#        result =  os.environ['__PYVENV_LAUNCHER__']
-#    else:
-#        result = sys.executable
-#    return result
-    result = os.path.normcase(sys.executable)
-    if not isinstance(result, text_type):
-        result = fsdecode(result)
-    return result
-
-
-def proceed(prompt, allowed_chars, error_prompt=None, default=None):
-    p = prompt
-    while True:
-        s = raw_input(p)
-        p = prompt
-        if not s and default:
-            s = default
-        if s:
-            c = s[0].lower()
-            if c in allowed_chars:
-                break
-            if error_prompt:
-                p = '%c: %s\n%s' % (c, error_prompt, prompt)
-    return c
-
-
-def extract_by_key(d, keys):
-    if isinstance(keys, string_types):
-        keys = keys.split()
-    result = {}
-    for key in keys:
-        if key in d:
-            result[key] = d[key]
-    return result
-
-def read_exports(stream):
-    if sys.version_info[0] >= 3:
-        # needs to be a text stream
-        stream = codecs.getreader('utf-8')(stream)
-    # Try to load as JSON, falling back on legacy format
-    data = stream.read()
-    stream = StringIO(data)
-    try:
-        jdata = json.load(stream)
-        result = jdata['extensions']['python.exports']['exports']
-        for group, entries in result.items():
-            for k, v in entries.items():
-                s = '%s = %s' % (k, v)
-                entry = get_export_entry(s)
-                assert entry is not None
-                entries[k] = entry
-        return result
-    except Exception:
-        stream.seek(0, 0)
-
-    def read_stream(cp, stream):
-        if hasattr(cp, 'read_file'):
-            cp.read_file(stream)
-        else:
-            cp.readfp(stream)
-
-    cp = configparser.ConfigParser()
-    try:
-        read_stream(cp, stream)
-    except configparser.MissingSectionHeaderError:
-        stream.close()
-        data = textwrap.dedent(data)
-        stream = StringIO(data)
-        read_stream(cp, stream)
-
-    result = {}
-    for key in cp.sections():
-        result[key] = entries = {}
-        for name, value in cp.items(key):
-            s = '%s = %s' % (name, value)
-            entry = get_export_entry(s)
-            assert entry is not None
-            #entry.dist = self
-            entries[name] = entry
-    return result
-
-
-def write_exports(exports, stream):
-    if sys.version_info[0] >= 3:
-        # needs to be a text stream
-        stream = codecs.getwriter('utf-8')(stream)
-    cp = configparser.ConfigParser()
-    for k, v in exports.items():
-        # TODO check k, v for valid values
-        cp.add_section(k)
-        for entry in v.values():
-            if entry.suffix is None:
-                s = entry.prefix
-            else:
-                s = '%s:%s' % (entry.prefix, entry.suffix)
-            if entry.flags:
-                s = '%s [%s]' % (s, ', '.join(entry.flags))
-            cp.set(k, entry.name, s)
-    cp.write(stream)
-
-
-@contextlib.contextmanager
-def tempdir():
-    td = tempfile.mkdtemp()
-    try:
-        yield td
-    finally:
-        shutil.rmtree(td)
-
-@contextlib.contextmanager
-def chdir(d):
-    cwd = os.getcwd()
-    try:
-        os.chdir(d)
-        yield
-    finally:
-        os.chdir(cwd)
-
-
-@contextlib.contextmanager
-def socket_timeout(seconds=15):
-    cto = socket.getdefaulttimeout()
-    try:
-        socket.setdefaulttimeout(seconds)
-        yield
-    finally:
-        socket.setdefaulttimeout(cto)
-
-
-class cached_property(object):
-    def __init__(self, func):
-        self.func = func
-        #for attr in ('__name__', '__module__', '__doc__'):
-        #    setattr(self, attr, getattr(func, attr, None))
-
-    def __get__(self, obj, cls=None):
-        if obj is None:
-            return self
-        value = self.func(obj)
-        object.__setattr__(obj, self.func.__name__, value)
-        #obj.__dict__[self.func.__name__] = value = self.func(obj)
-        return value
-
-def convert_path(pathname):
-    """Return 'pathname' as a name that will work on the native filesystem.
-
-    The path is split on '/' and put back together again using the current
-    directory separator.  Needed because filenames in the setup script are
-    always supplied in Unix style, and have to be converted to the local
-    convention before we can actually use them in the filesystem.  Raises
-    ValueError on non-Unix-ish systems if 'pathname' either starts or
-    ends with a slash.
-    """
-    if os.sep == '/':
-        return pathname
-    if not pathname:
-        return pathname
-    if pathname[0] == '/':
-        raise ValueError("path '%s' cannot be absolute" % pathname)
-    if pathname[-1] == '/':
-        raise ValueError("path '%s' cannot end with '/'" % pathname)
-
-    paths = pathname.split('/')
-    while os.curdir in paths:
-        paths.remove(os.curdir)
-    if not paths:
-        return os.curdir
-    return os.path.join(*paths)
-
-
-class FileOperator(object):
-    def __init__(self, dry_run=False):
-        self.dry_run = dry_run
-        self.ensured = set()
-        self._init_record()
-
-    def _init_record(self):
-        self.record = False
-        self.files_written = set()
-        self.dirs_created = set()
-
-    def record_as_written(self, path):
-        if self.record:
-            self.files_written.add(path)
-
-    def newer(self, source, target):
-        """Tell if 'source' is newer than 'target'.
-
-        Returns true if 'source' exists and is more recently modified than
-        'target', or if 'source' exists and 'target' doesn't.
-
-        Returns false if both exist and 'target' is the same age or younger
-        than 'source'. Raises DistlibException if 'source' does not exist.
-
-        Note that this test is not very accurate: files created in the same
-        second will have the same "age".
-        """
-        if not os.path.exists(source):
-            raise DistlibException("file '%r' does not exist" %
-                                   os.path.abspath(source))
-        if not os.path.exists(target):
-            return True
-
-        return os.stat(source).st_mtime > os.stat(target).st_mtime
-
-    def copy_file(self, infile, outfile, check=True):
-        """Copy a file respecting dry-run and force flags.
-        """
-        self.ensure_dir(os.path.dirname(outfile))
-        logger.info('Copying %s to %s', infile, outfile)
-        if not self.dry_run:
-            msg = None
-            if check:
-                if os.path.islink(outfile):
-                    msg = '%s is a symlink' % outfile
-                elif os.path.exists(outfile) and not os.path.isfile(outfile):
-                    msg = '%s is a non-regular file' % outfile
-            if msg:
-                raise ValueError(msg + ' which would be overwritten')
-            shutil.copyfile(infile, outfile)
-        self.record_as_written(outfile)
-
-    def copy_stream(self, instream, outfile, encoding=None):
-        assert not os.path.isdir(outfile)
-        self.ensure_dir(os.path.dirname(outfile))
-        logger.info('Copying stream %s to %s', instream, outfile)
-        if not self.dry_run:
-            if encoding is None:
-                outstream = open(outfile, 'wb')
-            else:
-                outstream = codecs.open(outfile, 'w', encoding=encoding)
-            try:
-                shutil.copyfileobj(instream, outstream)
-            finally:
-                outstream.close()
-        self.record_as_written(outfile)
-
-    def write_binary_file(self, path, data):
-        self.ensure_dir(os.path.dirname(path))
-        if not self.dry_run:
-            if os.path.exists(path):
-                os.remove(path)
-            with open(path, 'wb') as f:
-                f.write(data)
-        self.record_as_written(path)
-
-    def write_text_file(self, path, data, encoding):
-        self.write_binary_file(path, data.encode(encoding))
-
-    def set_mode(self, bits, mask, files):
-        if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'):
-            # Set the executable bits (owner, group, and world) on
-            # all the files specified.
-            for f in files:
-                if self.dry_run:
-                    logger.info("changing mode of %s", f)
-                else:
-                    mode = (os.stat(f).st_mode | bits) & mask
-                    logger.info("changing mode of %s to %o", f, mode)
-                    os.chmod(f, mode)
-
-    set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f)
-
-    def ensure_dir(self, path):
-        path = os.path.abspath(path)
-        if path not in self.ensured and not os.path.exists(path):
-            self.ensured.add(path)
-            d, f = os.path.split(path)
-            self.ensure_dir(d)
-            logger.info('Creating %s' % path)
-            if not self.dry_run:
-                os.mkdir(path)
-            if self.record:
-                self.dirs_created.add(path)
-
-    def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False):
-        dpath = cache_from_source(path, not optimize)
-        logger.info('Byte-compiling %s to %s', path, dpath)
-        if not self.dry_run:
-            if force or self.newer(path, dpath):
-                if not prefix:
-                    diagpath = None
-                else:
-                    assert path.startswith(prefix)
-                    diagpath = path[len(prefix):]
-            compile_kwargs = {}
-            if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'):
-                compile_kwargs['invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH
-            py_compile.compile(path, dpath, diagpath, True, **compile_kwargs)     # raise error
-        self.record_as_written(dpath)
-        return dpath
-
-    def ensure_removed(self, path):
-        if os.path.exists(path):
-            if os.path.isdir(path) and not os.path.islink(path):
-                logger.debug('Removing directory tree at %s', path)
-                if not self.dry_run:
-                    shutil.rmtree(path)
-                if self.record:
-                    if path in self.dirs_created:
-                        self.dirs_created.remove(path)
-            else:
-                if os.path.islink(path):
-                    s = 'link'
-                else:
-                    s = 'file'
-                logger.debug('Removing %s %s', s, path)
-                if not self.dry_run:
-                    os.remove(path)
-                if self.record:
-                    if path in self.files_written:
-                        self.files_written.remove(path)
-
-    def is_writable(self, path):
-        result = False
-        while not result:
-            if os.path.exists(path):
-                result = os.access(path, os.W_OK)
-                break
-            parent = os.path.dirname(path)
-            if parent == path:
-                break
-            path = parent
-        return result
-
-    def commit(self):
-        """
-        Commit recorded changes, turn off recording, return
-        changes.
-        """
-        assert self.record
-        result = self.files_written, self.dirs_created
-        self._init_record()
-        return result
-
-    def rollback(self):
-        if not self.dry_run:
-            for f in list(self.files_written):
-                if os.path.exists(f):
-                    os.remove(f)
-            # dirs should all be empty now, except perhaps for
-            # __pycache__ subdirs
-            # reverse so that subdirs appear before their parents
-            dirs = sorted(self.dirs_created, reverse=True)
-            for d in dirs:
-                flist = os.listdir(d)
-                if flist:
-                    assert flist == ['__pycache__']
-                    sd = os.path.join(d, flist[0])
-                    os.rmdir(sd)
-                os.rmdir(d)     # should fail if non-empty
-        self._init_record()
-
-def resolve(module_name, dotted_path):
-    if module_name in sys.modules:
-        mod = sys.modules[module_name]
-    else:
-        mod = __import__(module_name)
-    if dotted_path is None:
-        result = mod
-    else:
-        parts = dotted_path.split('.')
-        result = getattr(mod, parts.pop(0))
-        for p in parts:
-            result = getattr(result, p)
-    return result
-
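A quick sketch of resolve(), which is what ExportEntry.value relies on below (standard-library names used purely for illustration):

    import os.path
    from pip._vendor.distlib.util import resolve

    assert resolve('os.path', 'join') is os.path.join
    assert resolve('os.path', None) is os.path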
-
-class ExportEntry(object):
-    def __init__(self, name, prefix, suffix, flags):
-        self.name = name
-        self.prefix = prefix
-        self.suffix = suffix
-        self.flags = flags
-
-    @cached_property
-    def value(self):
-        return resolve(self.prefix, self.suffix)
-
-    def __repr__(self):  # pragma: no cover
-        return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,
-                                                self.suffix, self.flags)
-
-    def __eq__(self, other):
-        if not isinstance(other, ExportEntry):
-            result = False
-        else:
-            result = (self.name == other.name and
-                      self.prefix == other.prefix and
-                      self.suffix == other.suffix and
-                      self.flags == other.flags)
-        return result
-
-    __hash__ = object.__hash__
-
-
-ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+)
-                      \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
-                      \s*(\[\s*(?P<flags>\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
-                      ''', re.VERBOSE)
-
-def get_export_entry(specification):
-    m = ENTRY_RE.search(specification)
-    if not m:
-        result = None
-        if '[' in specification or ']' in specification:
-            raise DistlibException("Invalid specification "
-                                   "'%s'" % specification)
-    else:
-        d = m.groupdict()
-        name = d['name']
-        path = d['callable']
-        colons = path.count(':')
-        if colons == 0:
-            prefix, suffix = path, None
-        else:
-            if colons != 1:
-                raise DistlibException("Invalid specification "
-                                       "'%s'" % specification)
-            prefix, suffix = path.split(':')
-        flags = d['flags']
-        if flags is None:
-            if '[' in specification or ']' in specification:
-                raise DistlibException("Invalid specification "
-                                       "'%s'" % specification)
-            flags = []
-        else:
-            flags = [f.strip() for f in flags.split(',')]
-        result = ExportEntry(name, prefix, suffix, flags)
-    return result
-
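A small sketch of how a specification string maps onto the ExportEntry fields (specification chosen for illustration):

    from pip._vendor.distlib.util import get_export_entry

    entry = get_export_entry('foo = foo.cli:main [extra1, extra2]')
    # entry.name   == 'foo'
    # entry.prefix == 'foo.cli'
    # entry.suffix == 'main'
    # entry.flags  == ['extra1', 'extra2']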
-
-def get_cache_base(suffix=None):
-    """
-    Return the default base location for distlib caches. If the directory does
-    not exist, it is created. Use the suffix provided for the base directory,
-    and default to '.distlib' if it isn't provided.
-
-    On Windows, if LOCALAPPDATA is defined in the environment, then it is
-    assumed to be a directory, and will be the parent directory of the result.
-    On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
-    directory - using os.path.expanduser('~') - will be the parent directory of
-    the result.
-
-    The result is just the directory '.distlib' in the parent directory as
-    determined above, or with the name specified with ``suffix``.
-    """
-    if suffix is None:
-        suffix = '.distlib'
-    if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:
-        result = os.path.expandvars('$localappdata')
-    else:
-        # Assume posix, or old Windows
-        result = os.path.expanduser('~')
-    # we use 'isdir' instead of 'exists', because we want to
-    # fail if there's a file with that name
-    if os.path.isdir(result):
-        usable = os.access(result, os.W_OK)
-        if not usable:
-            logger.warning('Directory exists but is not writable: %s', result)
-    else:
-        try:
-            os.makedirs(result)
-            usable = True
-        except OSError:
-            logger.warning('Unable to create %s', result, exc_info=True)
-            usable = False
-    if not usable:
-        result = tempfile.mkdtemp()
-        logger.warning('Default location unusable, using %s', result)
-    return os.path.join(result, suffix)
-
-
-def path_to_cache_dir(path):
-    """
-    Convert an absolute path to a directory name for use in a cache.
-
-    The algorithm used is:
-
-    #. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
-    #. Any occurrence of ``os.sep`` is replaced with ``'--'``.
-    #. ``'.cache'`` is appended.
-    """
-    d, p = os.path.splitdrive(os.path.abspath(path))
-    if d:
-        d = d.replace(':', '---')
-    p = p.replace(os.sep, '--')
-    return d + p + '.cache'
-
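For instance, on a POSIX system the transformation above yields (the path is hypothetical):

    from pip._vendor.distlib.util import path_to_cache_dir

    # On POSIX there is no drive letter and os.sep == '/':
    assert path_to_cache_dir('/home/user/project') == '--home--user--project.cache'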
-
-def ensure_slash(s):
-    if not s.endswith('/'):
-        return s + '/'
-    return s
-
-
-def parse_credentials(netloc):
-    username = password = None
-    if '@' in netloc:
-        prefix, netloc = netloc.rsplit('@', 1)
-        if ':' not in prefix:
-            username = prefix
-        else:
-            username, password = prefix.split(':', 1)
-    if username:
-        username = unquote(username)
-    if password:
-        password = unquote(password)
-    return username, password, netloc
-
-
-def get_process_umask():
-    result = os.umask(0o22)
-    os.umask(result)
-    return result
-
-def is_string_sequence(seq):
-    result = True
-    i = None
-    for i, s in enumerate(seq):
-        if not isinstance(s, string_types):
-            result = False
-            break
-    assert i is not None
-    return result
-
-PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
-                                      '([a-z0-9_.+-]+)', re.I)
-PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')
-
-
-def split_filename(filename, project_name=None):
-    """
-    Extract name, version, python version from a filename (no extension)
-
-    Return name, version, pyver or None
-    """
-    result = None
-    pyver = None
-    filename = unquote(filename).replace(' ', '-')
-    m = PYTHON_VERSION.search(filename)
-    if m:
-        pyver = m.group(1)
-        filename = filename[:m.start()]
-    if project_name and len(filename) > len(project_name) + 1:
-        m = re.match(re.escape(project_name) + r'\b', filename)
-        if m:
-            n = m.end()
-            result = filename[:n], filename[n + 1:], pyver
-    if result is None:
-        m = PROJECT_NAME_AND_VERSION.match(filename)
-        if m:
-            result = m.group(1), m.group(3), pyver
-    return result
-
-# Allow spaces in name because of legacy dists like "Twisted Core"
-NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'
-                             r'\(\s*(?P<ver>[^\s)]+)\)$')
-
-def parse_name_and_version(p):
-    """
-    A utility method used to get name and version from a string.
-
-    From e.g. a Provides-Dist value.
-
-    :param p: A value in a form 'foo (1.0)'
-    :return: The name and version as a tuple.
-    """
-    m = NAME_VERSION_RE.match(p)
-    if not m:
-        raise DistlibException('Ill-formed name/version string: \'%s\'' % p)
-    d = m.groupdict()
-    return d['name'].strip().lower(), d['ver']
-
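For example, using the documented 'foo (1.0)' form (the value is illustrative):

    from pip._vendor.distlib.util import parse_name_and_version

    assert parse_name_and_version('Twisted Core (13.2.0)') == ('twisted core', '13.2.0')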
-def get_extras(requested, available):
-    result = set()
-    requested = set(requested or [])
-    available = set(available or [])
-    if '*' in requested:
-        requested.remove('*')
-        result |= available
-    for r in requested:
-        if r == '-':
-            result.add(r)
-        elif r.startswith('-'):
-            unwanted = r[1:]
-            if unwanted not in available:
-                logger.warning('undeclared extra: %s' % unwanted)
-            if unwanted in result:
-                result.remove(unwanted)
-        else:
-            if r not in available:
-                logger.warning('undeclared extra: %s' % r)
-            result.add(r)
-    return result
-#
-# Extended metadata functionality
-#
-
-def _get_external_data(url):
-    result = {}
-    try:
-        # urlopen might fail if it runs into redirections,
-        # because of Python issue #13696. Fixed in locators
-        # using a custom redirect handler.
-        resp = urlopen(url)
-        headers = resp.info()
-        ct = headers.get('Content-Type')
-        if not ct.startswith('application/json'):
-            logger.debug('Unexpected response for JSON request: %s', ct)
-        else:
-            reader = codecs.getreader('utf-8')(resp)
-            #data = reader.read().decode('utf-8')
-            #result = json.loads(data)
-            result = json.load(reader)
-    except Exception as e:
-        logger.exception('Failed to get external data for %s: %s', url, e)
-    return result
-
-_external_data_base_url = 'https://www.red-dove.com/pypi/projects/'
-
-def get_project_data(name):
-    url = '%s/%s/project.json' % (name[0].upper(), name)
-    url = urljoin(_external_data_base_url, url)
-    result = _get_external_data(url)
-    return result
-
-def get_package_data(name, version):
-    url = '%s/%s/package-%s.json' % (name[0].upper(), name, version)
-    url = urljoin(_external_data_base_url, url)
-    return _get_external_data(url)
-
-
-class Cache(object):
-    """
-    A class implementing a cache for resources that need to live in the file system
-    e.g. shared libraries. This class was moved from resources to here because it
-    could be used by other modules, e.g. the wheel module.
-    """
-
-    def __init__(self, base):
-        """
-        Initialise an instance.
-
-        :param base: The base directory where the cache should be located.
-        """
-        # we use 'isdir' instead of 'exists', because we want to
-        # fail if there's a file with that name
-        if not os.path.isdir(base):  # pragma: no cover
-            os.makedirs(base)
-        if (os.stat(base).st_mode & 0o77) != 0:
-            logger.warning('Directory \'%s\' is not private', base)
-        self.base = os.path.abspath(os.path.normpath(base))
-
-    def prefix_to_dir(self, prefix):
-        """
-        Converts a resource prefix to a directory name in the cache.
-        """
-        return path_to_cache_dir(prefix)
-
-    def clear(self):
-        """
-        Clear the cache.
-        """
-        not_removed = []
-        for fn in os.listdir(self.base):
-            fn = os.path.join(self.base, fn)
-            try:
-                if os.path.islink(fn) or os.path.isfile(fn):
-                    os.remove(fn)
-                elif os.path.isdir(fn):
-                    shutil.rmtree(fn)
-            except Exception:
-                not_removed.append(fn)
-        return not_removed
-
-
-class EventMixin(object):
-    """
-    A very simple publish/subscribe system.
-    """
-    def __init__(self):
-        self._subscribers = {}
-
-    def add(self, event, subscriber, append=True):
-        """
-        Add a subscriber for an event.
-
-        :param event: The name of an event.
-        :param subscriber: The subscriber to be added (and called when the
-                           event is published).
-        :param append: Whether to append or prepend the subscriber to an
-                       existing subscriber list for the event.
-        """
-        subs = self._subscribers
-        if event not in subs:
-            subs[event] = deque([subscriber])
-        else:
-            sq = subs[event]
-            if append:
-                sq.append(subscriber)
-            else:
-                sq.appendleft(subscriber)
-
-    def remove(self, event, subscriber):
-        """
-        Remove a subscriber for an event.
-
-        :param event: The name of an event.
-        :param subscriber: The subscriber to be removed.
-        """
-        subs = self._subscribers
-        if event not in subs:
-            raise ValueError('No subscribers: %r' % event)
-        subs[event].remove(subscriber)
-
-    def get_subscribers(self, event):
-        """
-        Return an iterator for the subscribers for an event.
-        :param event: The event to return subscribers for.
-        """
-        return iter(self._subscribers.get(event, ()))
-
-    def publish(self, event, *args, **kwargs):
-        """
-        Publish an event and return a list of values returned by its
-        subscribers.
-
-        :param event: The event to publish.
-        :param args: The positional arguments to pass to the event's
-                     subscribers.
-        :param kwargs: The keyword arguments to pass to the event's
-                       subscribers.
-        """
-        result = []
-        for subscriber in self.get_subscribers(event):
-            try:
-                value = subscriber(event, *args, **kwargs)
-            except Exception:
-                logger.exception('Exception during event publication')
-                value = None
-            result.append(value)
-        logger.debug('publish %s: args = %s, kwargs = %s, result = %s',
-                     event, args, kwargs, result)
-        return result
-
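A minimal sketch of the publish/subscribe flow above (the class and event names are hypothetical):

    from pip._vendor.distlib.util import EventMixin

    class Publisher(EventMixin):
        pass

    def on_build(event, **kwargs):
        return 'built %s' % kwargs.get('name')

    pub = Publisher()
    pub.add('build', on_build)
    print(pub.publish('build', name='mega.py'))   # ['built mega.py']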
-#
-# Simple sequencing
-#
-class Sequencer(object):
-    def __init__(self):
-        self._preds = {}
-        self._succs = {}
-        self._nodes = set()     # nodes with no preds/succs
-
-    def add_node(self, node):
-        self._nodes.add(node)
-
-    def remove_node(self, node, edges=False):
-        if node in self._nodes:
-            self._nodes.remove(node)
-        if edges:
-            for p in set(self._preds.get(node, ())):
-                self.remove(p, node)
-            for s in set(self._succs.get(node, ())):
-                self.remove(node, s)
-            # Remove empties
-            for k, v in list(self._preds.items()):
-                if not v:
-                    del self._preds[k]
-            for k, v in list(self._succs.items()):
-                if not v:
-                    del self._succs[k]
-
-    def add(self, pred, succ):
-        assert pred != succ
-        self._preds.setdefault(succ, set()).add(pred)
-        self._succs.setdefault(pred, set()).add(succ)
-
-    def remove(self, pred, succ):
-        assert pred != succ
-        try:
-            preds = self._preds[succ]
-            succs = self._succs[pred]
-        except KeyError:  # pragma: no cover
-            raise ValueError('%r not a successor of anything' % succ)
-        try:
-            preds.remove(pred)
-            succs.remove(succ)
-        except KeyError:  # pragma: no cover
-            raise ValueError('%r not a successor of %r' % (succ, pred))
-
-    def is_step(self, step):
-        return (step in self._preds or step in self._succs or
-                step in self._nodes)
-
-    def get_steps(self, final):
-        if not self.is_step(final):
-            raise ValueError('Unknown: %r' % final)
-        result = []
-        todo = []
-        seen = set()
-        todo.append(final)
-        while todo:
-            step = todo.pop(0)
-            if step in seen:
-                # if a step was already seen,
-                # move it to the end (so it will appear earlier
-                # when reversed on return) ... but not for the
-                # final step, as that would be confusing for
-                # users
-                if step != final:
-                    result.remove(step)
-                    result.append(step)
-            else:
-                seen.add(step)
-                result.append(step)
-                preds = self._preds.get(step, ())
-                todo.extend(preds)
-        return reversed(result)
-
-    @property
-    def strong_connections(self):
-        #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
-        index_counter = [0]
-        stack = []
-        lowlinks = {}
-        index = {}
-        result = []
-
-        graph = self._succs
-
-        def strongconnect(node):
-            # set the depth index for this node to the smallest unused index
-            index[node] = index_counter[0]
-            lowlinks[node] = index_counter[0]
-            index_counter[0] += 1
-            stack.append(node)
-
-            # Consider successors
-            try:
-                successors = graph[node]
-            except Exception:
-                successors = []
-            for successor in successors:
-                if successor not in lowlinks:
-                    # Successor has not yet been visited
-                    strongconnect(successor)
-                    lowlinks[node] = min(lowlinks[node],lowlinks[successor])
-                elif successor in stack:
-                    # the successor is in the stack and hence in the current
-                    # strongly connected component (SCC)
-                    lowlinks[node] = min(lowlinks[node],index[successor])
-
-            # If `node` is a root node, pop the stack and generate an SCC
-            if lowlinks[node] == index[node]:
-                connected_component = []
-
-                while True:
-                    successor = stack.pop()
-                    connected_component.append(successor)
-                    if successor == node: break
-                component = tuple(connected_component)
-                # storing the result
-                result.append(component)
-
-        for node in graph:
-            if node not in lowlinks:
-                strongconnect(node)
-
-        return result
-
-    @property
-    def dot(self):
-        result = ['digraph G {']
-        for succ in self._preds:
-            preds = self._preds[succ]
-            for pred in preds:
-                result.append('  %s -> %s;' % (pred, succ))
-        for node in self._nodes:
-            result.append('  %s;' % node)
-        result.append('}')
-        return '\n'.join(result)
-
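A short sketch of ordering steps with the Sequencer above (the step names are hypothetical):

    from pip._vendor.distlib.util import Sequencer

    seq = Sequencer()
    seq.add('build', 'test')       # 'build' must precede 'test'
    seq.add('test', 'release')     # 'test' must precede 'release'
    print(list(seq.get_steps('release')))   # ['build', 'test', 'release']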
-#
-# Unarchiving functionality for zip, tar, tgz, tbz, whl
-#
-
-ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip',
-                      '.tgz', '.tbz', '.whl')
-
-def unarchive(archive_filename, dest_dir, format=None, check=True):
-
-    def check_path(path):
-        if not isinstance(path, text_type):
-            path = path.decode('utf-8')
-        p = os.path.abspath(os.path.join(dest_dir, path))
-        if not p.startswith(dest_dir) or p[plen] != os.sep:
-            raise ValueError('path outside destination: %r' % p)
-
-    dest_dir = os.path.abspath(dest_dir)
-    plen = len(dest_dir)
-    archive = None
-    if format is None:
-        if archive_filename.endswith(('.zip', '.whl')):
-            format = 'zip'
-        elif archive_filename.endswith(('.tar.gz', '.tgz')):
-            format = 'tgz'
-            mode = 'r:gz'
-        elif archive_filename.endswith(('.tar.bz2', '.tbz')):
-            format = 'tbz'
-            mode = 'r:bz2'
-        elif archive_filename.endswith('.tar'):
-            format = 'tar'
-            mode = 'r'
-        else:  # pragma: no cover
-            raise ValueError('Unknown format for %r' % archive_filename)
-    try:
-        if format == 'zip':
-            archive = ZipFile(archive_filename, 'r')
-            if check:
-                names = archive.namelist()
-                for name in names:
-                    check_path(name)
-        else:
-            archive = tarfile.open(archive_filename, mode)
-            if check:
-                names = archive.getnames()
-                for name in names:
-                    check_path(name)
-        if format != 'zip' and sys.version_info[0] < 3:
-            # See Python issue 17153. If the dest path contains Unicode,
-            # tarfile extraction fails on Python 2.x if a member path name
-            # contains non-ASCII characters - it leads to an implicit
-            # bytes -> unicode conversion using ASCII to decode.
-            for tarinfo in archive.getmembers():
-                if not isinstance(tarinfo.name, text_type):
-                    tarinfo.name = tarinfo.name.decode('utf-8')
-        archive.extractall(dest_dir)
-
-    finally:
-        if archive:
-            archive.close()
-
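A usage sketch for `unarchive` (the file names are hypothetical). The format is inferred from the extension unless `format` is passed, and with `check=True` any member whose resolved path would escape `dest_dir` raises `ValueError`:

    unarchive('dist/example-1.0.zip', '/tmp/unpacked')          # handled via ZipFile
    unarchive('dist/example-1.0.tar.gz', '/tmp/unpacked-tgz')   # handled via tarfile, mode 'r:gz'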
-
-def zip_dir(directory):
-    """zip a directory tree into a BytesIO object"""
-    result = io.BytesIO()
-    dlen = len(directory)
-    with ZipFile(result, "w") as zf:
-        for root, dirs, files in os.walk(directory):
-            for name in files:
-                full = os.path.join(root, name)
-                rel = root[dlen:]
-                dest = os.path.join(rel, name)
-                zf.write(full, dest)
-    return result
-
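And a sketch for `zip_dir`, which returns the archive as an in-memory `io.BytesIO` rather than writing to disk (the paths are hypothetical):

    buf = zip_dir('/tmp/unpacked')            # BytesIO holding the zip data
    with open('/tmp/unpacked.zip', 'wb') as f:
        f.write(buf.getvalue())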
-#
-# Simple progress bar
-#
-
-UNITS = ('', 'K', 'M', 'G','T','P')
-
-
-class Progress(object):
-    unknown = 'UNKNOWN'
-
-    def __init__(self, minval=0, maxval=100):
-        assert maxval is None or maxval >= minval
-        self.min = self.cur = minval
-        self.max = maxval
-        self.started = None
-        self.elapsed = 0
-        self.done = False
-
-    def update(self, curval):
-        assert self.min <= curval
-        assert self.max is None or curval <= self.max
-        self.cur = curval
-        now = time.time()
-        if self.started is None:
-            self.started = now
-        else:
-            self.elapsed = now - self.started
-
-    def increment(self, incr):
-        assert incr >= 0
-        self.update(self.cur + incr)
-
-    def start(self):
-        self.update(self.min)
-        return self
-
-    def stop(self):
-        if self.max is not None:
-            self.update(self.max)
-        self.done = True
-
-    @property
-    def maximum(self):
-        return self.unknown if self.max is None else self.max
-
-    @property
-    def percentage(self):
-        if self.done:
-            result = '100 %'
-        elif self.max is None:
-            result = ' ?? %'
-        else:
-            v = 100.0 * (self.cur - self.min) / (self.max - self.min)
-            result = '%3d %%' % v
-        return result
-
-    def format_duration(self, duration):
-        if (duration <= 0) and self.max is None or self.cur == self.min:
-            result = '??:??:??'
-        #elif duration < 1:
-        #    result = '--:--:--'
-        else:
-            result = time.strftime('%H:%M:%S', time.gmtime(duration))
-        return result
-
-    @property
-    def ETA(self):
-        if self.done:
-            prefix = 'Done'
-            t = self.elapsed
-            #import pdb; pdb.set_trace()
-        else:
-            prefix = 'ETA '
-            if self.max is None:
-                t = -1
-            elif self.elapsed == 0 or (self.cur == self.min):
-                t = 0
-            else:
-                #import pdb; pdb.set_trace()
-                t = float(self.max - self.min)
-                t /= self.cur - self.min
-                t = (t - 1) * self.elapsed
-        return '%s: %s' % (prefix, self.format_duration(t))
-
-    @property
-    def speed(self):
-        if self.elapsed == 0:
-            result = 0.0
-        else:
-            result = (self.cur - self.min) / self.elapsed
-        for unit in UNITS:
-            if result < 1000:
-                break
-            result /= 1000.0
-        return '%d %sB/s' % (result, unit)
-
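A small sketch of how the `Progress` class above is driven; the byte counts are made up, and `percentage`, `ETA` and `speed` are derived from the values fed to `update`/`increment`:

    p = Progress(maxval=1000).start()
    for _ in range(10):
        p.increment(100)                      # pretend 100 bytes were processed
        print(p.percentage, p.ETA, p.speed)
    p.stop()
    print(p.percentage)                       # '100 %'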
-#
-# Glob functionality
-#
-
-RICH_GLOB = re.compile(r'\{([^}]*)\}')
-_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
-_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')
-
-
-def iglob(path_glob):
-    """Extended globbing function that supports ** and {opt1,opt2,opt3}."""
-    if _CHECK_RECURSIVE_GLOB.search(path_glob):
-        msg = """invalid glob %r: recursive glob "**" must be used alone"""
-        raise ValueError(msg % path_glob)
-    if _CHECK_MISMATCH_SET.search(path_glob):
-        msg = """invalid glob %r: mismatching set marker '{' or '}'"""
-        raise ValueError(msg % path_glob)
-    return _iglob(path_glob)
-
-
-def _iglob(path_glob):
-    rich_path_glob = RICH_GLOB.split(path_glob, 1)
-    if len(rich_path_glob) > 1:
-        assert len(rich_path_glob) == 3, rich_path_glob
-        prefix, set, suffix = rich_path_glob
-        for item in set.split(','):
-            for path in _iglob(''.join((prefix, item, suffix))):
-                yield path
-    else:
-        if '**' not in path_glob:
-            for item in std_iglob(path_glob):
-                yield item
-        else:
-            prefix, radical = path_glob.split('**', 1)
-            if prefix == '':
-                prefix = '.'
-            if radical == '':
-                radical = '*'
-            else:
-                # we support both '/' and '\\' as separators after '**'
-                radical = radical.lstrip('/')
-                radical = radical.lstrip('\\')
-            for path, dir, files in os.walk(prefix):
-                path = os.path.normpath(path)
-                for fn in _iglob(os.path.join(path, radical)):
-                    yield fn
-
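Usage sketch for the extended glob (the patterns are hypothetical): `**` recurses via `os.walk`, and `{a,b}` expands each alternative before globbing:

    for path in iglob('src/**/*.py'):            # recursive: every .py under src/
        print(path)
    for path in iglob('docs/{index,api}.rst'):   # expands to docs/index.rst and docs/api.rst
        print(path)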
-if ssl:
-    from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname,
-                         CertificateError)
-
-
-#
-# HTTPSConnection which verifies certificates/matches domains
-#
-
-    class HTTPSConnection(httplib.HTTPSConnection):
-        ca_certs = None # set this to the path to the certs file (.pem)
-        check_domain = True # only used if ca_certs is not None
-
-        # noinspection PyPropertyAccess
-        def connect(self):
-            sock = socket.create_connection((self.host, self.port), self.timeout)
-            if getattr(self, '_tunnel_host', False):
-                self.sock = sock
-                self._tunnel()
-
-            if not hasattr(ssl, 'SSLContext'):
-                # For 2.x
-                if self.ca_certs:
-                    cert_reqs = ssl.CERT_REQUIRED
-                else:
-                    cert_reqs = ssl.CERT_NONE
-                self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
-                                            cert_reqs=cert_reqs,
-                                            ssl_version=ssl.PROTOCOL_SSLv23,
-                                            ca_certs=self.ca_certs)
-            else:  # pragma: no cover
-                context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
-                context.options |= ssl.OP_NO_SSLv2
-                if self.cert_file:
-                    context.load_cert_chain(self.cert_file, self.key_file)
-                kwargs = {}
-                if self.ca_certs:
-                    context.verify_mode = ssl.CERT_REQUIRED
-                    context.load_verify_locations(cafile=self.ca_certs)
-                    if getattr(ssl, 'HAS_SNI', False):
-                        kwargs['server_hostname'] = self.host
-                self.sock = context.wrap_socket(sock, **kwargs)
-            if self.ca_certs and self.check_domain:
-                try:
-                    match_hostname(self.sock.getpeercert(), self.host)
-                    logger.debug('Host verified: %s', self.host)
-                except CertificateError:  # pragma: no cover
-                    self.sock.shutdown(socket.SHUT_RDWR)
-                    self.sock.close()
-                    raise
-
-    class HTTPSHandler(BaseHTTPSHandler):
-        def __init__(self, ca_certs, check_domain=True):
-            BaseHTTPSHandler.__init__(self)
-            self.ca_certs = ca_certs
-            self.check_domain = check_domain
-
-        def _conn_maker(self, *args, **kwargs):
-            """
-            This is called to create a connection instance. Normally you'd
-            pass a connection class to do_open, but it doesn't actually check for
-            a class, and just expects a callable. As long as we behave just as a
-            constructor would have, we should be OK. If it ever changes so that
-            we *must* pass a class, we'll create an UnsafeHTTPSConnection class
-            which just sets check_domain to False in the class definition, and
-            choose which one to pass to do_open.
-            """
-            result = HTTPSConnection(*args, **kwargs)
-            if self.ca_certs:
-                result.ca_certs = self.ca_certs
-                result.check_domain = self.check_domain
-            return result
-
-        def https_open(self, req):
-            try:
-                return self.do_open(self._conn_maker, req)
-            except URLError as e:
-                if 'certificate verify failed' in str(e.reason):
-                    raise CertificateError('Unable to verify server certificate '
-                                           'for %s' % req.host)
-                else:
-                    raise
-
-    #
-    # To protect against mixing HTTP traffic with HTTPS (examples: a Man-In-The-
-    # Middle proxy using HTTP listens on port 443, or an index mistakenly serves
-    # HTML containing an http://xyz link when it should be https://xyz),
-    # you can use the following handler class, which does not allow HTTP traffic.
-    #
-    # It works by inheriting from HTTPHandler - so build_opener won't add a
-    # handler for HTTP itself.
-    #
-    class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler):
-        def http_open(self, req):
-            raise URLError('Unexpected HTTP request on what should be a secure '
-                           'connection: %s' % req)
-
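A usage sketch for the handlers above, assuming `ssl` is available and using a hypothetical CA bundle path; on Python 3 `build_opener` comes from `urllib.request` (from `urllib2` on 2.x):

    from urllib.request import build_opener

    handler = HTTPSOnlyHandler('/etc/ssl/certs/ca-certificates.crt', check_domain=True)
    opener = build_opener(handler)
    response = opener.open('https://pypi.org/simple/')   # any http:// URL raises URLError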
-#
-# XML-RPC with timeouts
-#
-
-_ver_info = sys.version_info[:2]
-
-if _ver_info == (2, 6):
-    class HTTP(httplib.HTTP):
-        def __init__(self, host='', port=None, **kwargs):
-            if port == 0:   # 0 means use port 0, not the default port
-                port = None
-            self._setup(self._connection_class(host, port, **kwargs))
-
-
-    if ssl:
-        class HTTPS(httplib.HTTPS):
-            def __init__(self, host='', port=None, **kwargs):
-                if port == 0:   # 0 means use port 0, not the default port
-                    port = None
-                self._setup(self._connection_class(host, port, **kwargs))
-
-
-class Transport(xmlrpclib.Transport):
-    def __init__(self, timeout, use_datetime=0):
-        self.timeout = timeout
-        xmlrpclib.Transport.__init__(self, use_datetime)
-
-    def make_connection(self, host):
-        h, eh, x509 = self.get_host_info(host)
-        if _ver_info == (2, 6):
-            result = HTTP(h, timeout=self.timeout)
-        else:
-            if not self._connection or host != self._connection[0]:
-                self._extra_headers = eh
-                self._connection = host, httplib.HTTPConnection(h)
-            result = self._connection[1]
-        return result
-
-if ssl:
-    class SafeTransport(xmlrpclib.SafeTransport):
-        def __init__(self, timeout, use_datetime=0):
-            self.timeout = timeout
-            xmlrpclib.SafeTransport.__init__(self, use_datetime)
-
-        def make_connection(self, host):
-            h, eh, kwargs = self.get_host_info(host)
-            if not kwargs:
-                kwargs = {}
-            kwargs['timeout'] = self.timeout
-            if _ver_info == (2, 6):
-                result = HTTPS(host, None, **kwargs)
-            else:
-                if not self._connection or host != self._connection[0]:
-                    self._extra_headers = eh
-                    self._connection = host, httplib.HTTPSConnection(h, None,
-                                                                     **kwargs)
-                result = self._connection[1]
-            return result
-
-
-class ServerProxy(xmlrpclib.ServerProxy):
-    def __init__(self, uri, **kwargs):
-        self.timeout = timeout = kwargs.pop('timeout', None)
-        # The above classes only come into play if a timeout
-        # is specified
-        if timeout is not None:
-            scheme, _ = splittype(uri)
-            use_datetime = kwargs.get('use_datetime', 0)
-            if scheme == 'https':
-                tcls = SafeTransport
-            else:
-                tcls = Transport
-            kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime)
-            self.transport = t
-        xmlrpclib.ServerProxy.__init__(self, uri, **kwargs)
-
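Usage sketch: the timeout-aware `ServerProxy` only swaps in the transports above when a `timeout` keyword is given (the endpoint URL is illustrative, and `SafeTransport` requires `ssl`):

    proxy = ServerProxy('https://pypi.org/pypi', timeout=10.0)   # https -> SafeTransport
    # proxy.some_method(...)   # hypothetical XML-RPC call on the remote service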
-#
-# CSV functionality. This is provided because on 2.x, the csv module can't
-# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files.
-#
-
-def _csv_open(fn, mode, **kwargs):
-    if sys.version_info[0] < 3:
-        mode += 'b'
-    else:
-        kwargs['newline'] = ''
-        # Python 3 determines encoding from locale. Force 'utf-8'
-        # file encoding to match other forced utf-8 encoding
-        kwargs['encoding'] = 'utf-8'
-    return open(fn, mode, **kwargs)
-
-
-class CSVBase(object):
-    defaults = {
-        'delimiter': str(','),      # The strs are used because we need native
-        'quotechar': str('"'),      # str in the csv API (2.x won't take
-        'lineterminator': str('\n') # Unicode)
-    }
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, *exc_info):
-        self.stream.close()
-
-
-class CSVReader(CSVBase):
-    def __init__(self, **kwargs):
-        if 'stream' in kwargs:
-            stream = kwargs['stream']
-            if sys.version_info[0] >= 3:
-                # needs to be a text stream
-                stream = codecs.getreader('utf-8')(stream)
-            self.stream = stream
-        else:
-            self.stream = _csv_open(kwargs['path'], 'r')
-        self.reader = csv.reader(self.stream, **self.defaults)
-
-    def __iter__(self):
-        return self
-
-    def next(self):
-        result = next(self.reader)
-        if sys.version_info[0] < 3:
-            for i, item in enumerate(result):
-                if not isinstance(item, text_type):
-                    result[i] = item.decode('utf-8')
-        return result
-
-    __next__ = next
-
-class CSVWriter(CSVBase):
-    def __init__(self, fn, **kwargs):
-        self.stream = _csv_open(fn, 'w')
-        self.writer = csv.writer(self.stream, **self.defaults)
-
-    def writerow(self, row):
-        if sys.version_info[0] < 3:
-            r = []
-            for item in row:
-                if isinstance(item, text_type):
-                    item = item.encode('utf-8')
-                r.append(item)
-            row = r
-        self.writer.writerow(row)
-
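A round-trip sketch for the CSV helpers (the file name and row values are made up); on 2.x values are encoded/decoded as UTF-8, on 3.x the file itself is opened as UTF-8 text:

    with CSVWriter('RECORD.test') as writer:
        writer.writerow(('pkg/__init__.py', 'sha256=abc123', '137'))
    with CSVReader(path='RECORD.test') as reader:
        for row in reader:
            print(row)                # ['pkg/__init__.py', 'sha256=abc123', '137']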
-#
-#   Configurator functionality
-#
-
-class Configurator(BaseConfigurator):
-
-    value_converters = dict(BaseConfigurator.value_converters)
-    value_converters['inc'] = 'inc_convert'
-
-    def __init__(self, config, base=None):
-        super(Configurator, self).__init__(config)
-        self.base = base or os.getcwd()
-
-    def configure_custom(self, config):
-        def convert(o):
-            if isinstance(o, (list, tuple)):
-                result = type(o)([convert(i) for i in o])
-            elif isinstance(o, dict):
-                if '()' in o:
-                    result = self.configure_custom(o)
-                else:
-                    result = {}
-                    for k in o:
-                        result[k] = convert(o[k])
-            else:
-                result = self.convert(o)
-            return result
-
-        c = config.pop('()')
-        if not callable(c):
-            c = self.resolve(c)
-        props = config.pop('.', None)
-        # Check for valid identifiers
-        args = config.pop('[]', ())
-        if args:
-            args = tuple([convert(o) for o in args])
-        items = [(k, convert(config[k])) for k in config if valid_ident(k)]
-        kwargs = dict(items)
-        result = c(*args, **kwargs)
-        if props:
-            for n, v in props.items():
-                setattr(result, n, convert(v))
-        return result
-
-    def __getitem__(self, key):
-        result = self.config[key]
-        if isinstance(result, dict) and '()' in result:
-            self.config[key] = result = self.configure_custom(result)
-        return result
-
-    def inc_convert(self, value):
-        """Default converter for the inc:// protocol."""
-        if not os.path.isabs(value):
-            value = os.path.join(self.base, value)
-        with codecs.open(value, 'r', encoding='utf-8') as f:
-            result = json.load(f)
-        return result
-
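A hedged sketch of the '()' convention handled by `configure_custom` above: '()' names the callable (resolved if given as a dotted string via the inherited BaseConfigurator), '[]' supplies positional arguments, and '.' sets attributes after construction. The config content here is invented:

    cfg = Configurator({
        'main': {
            '()': 'collections.OrderedDict',      # resolved to the class
            '[]': [[('a', 1), ('b', 2)]],         # positional args for the callable
        }
    })
    d = cfg['main']                               # built lazily on first access
    print(d)                                      # OrderedDict([('a', 1), ('b', 2)])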
-
-class SubprocessMixin(object):
-    """
-    Mixin for running subprocesses and capturing their output
-    """
-    def __init__(self, verbose=False, progress=None):
-        self.verbose = verbose
-        self.progress = progress
-
-    def reader(self, stream, context):
-        """
-        Read lines from a subprocess' output stream and either pass to a progress
-        callable (if specified) or write progress information to sys.stderr.
-        """
-        progress = self.progress
-        verbose = self.verbose
-        while True:
-            s = stream.readline()
-            if not s:
-                break
-            if progress is not None:
-                progress(s, context)
-            else:
-                if not verbose:
-                    sys.stderr.write('.')
-                else:
-                    sys.stderr.write(s.decode('utf-8'))
-                sys.stderr.flush()
-        stream.close()
-
-    def run_command(self, cmd, **kwargs):
-        p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
-                             stderr=subprocess.PIPE, **kwargs)
-        t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout'))
-        t1.start()
-        t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr'))
-        t2.start()
-        p.wait()
-        t1.join()
-        t2.join()
-        if self.progress is not None:
-            self.progress('done.', 'main')
-        elif self.verbose:
-            sys.stderr.write('done.\n')
-        return p
-
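Usage sketch for `SubprocessMixin`: mix it into a class, then `run_command` streams stdout/stderr through `reader` threads while the process runs:

    class Runner(SubprocessMixin):
        pass

    r = Runner(verbose=True)                      # echo child output to sys.stderr
    p = r.run_command([sys.executable, '-c', 'print("hello")'])
    print(p.returncode)                           # 0 on success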
-
-def normalize_name(name):
-    """Normalize a python package name a la PEP 503"""
-    # https://www.python.org/dev/peps/pep-0503/#normalized-names
-    return re.sub('[-_.]+', '-', name).lower()
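For example, per the PEP 503 rules above:

    print(normalize_name('mega.py'))                 # 'mega-py'
    print(normalize_name('Django_REST-framework'))   # 'django-rest-framework'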
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/version.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/version.py
deleted file mode 100644
index 3eebe18..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/version.py
+++ /dev/null
@@ -1,736 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2012-2017 The Python Software Foundation.
-# See LICENSE.txt and CONTRIBUTORS.txt.
-#
-"""
-Implementation of a flexible versioning scheme providing support for PEP-440,
-setuptools-compatible and semantic versioning.
-"""
-
-import logging
-import re
-
-from .compat import string_types
-from .util import parse_requirement
-
-__all__ = ['NormalizedVersion', 'NormalizedMatcher',
-           'LegacyVersion', 'LegacyMatcher',
-           'SemanticVersion', 'SemanticMatcher',
-           'UnsupportedVersionError', 'get_scheme']
-
-logger = logging.getLogger(__name__)
-
-
-class UnsupportedVersionError(ValueError):
-    """This is an unsupported version."""
-    pass
-
-
-class Version(object):
-    def __init__(self, s):
-        self._string = s = s.strip()
-        self._parts = parts = self.parse(s)
-        assert isinstance(parts, tuple)
-        assert len(parts) > 0
-
-    def parse(self, s):
-        raise NotImplementedError('please implement in a subclass')
-
-    def _check_compatible(self, other):
-        if type(self) != type(other):
-            raise TypeError('cannot compare %r and %r' % (self, other))
-
-    def __eq__(self, other):
-        self._check_compatible(other)
-        return self._parts == other._parts
-
-    def __ne__(self, other):
-        return not self.__eq__(other)
-
-    def __lt__(self, other):
-        self._check_compatible(other)
-        return self._parts < other._parts
-
-    def __gt__(self, other):
-        return not (self.__lt__(other) or self.__eq__(other))
-
-    def __le__(self, other):
-        return self.__lt__(other) or self.__eq__(other)
-
-    def __ge__(self, other):
-        return self.__gt__(other) or self.__eq__(other)
-
-    # See http://docs.python.org/reference/datamodel#object.__hash__
-    def __hash__(self):
-        return hash(self._parts)
-
-    def __repr__(self):
-        return "%s('%s')" % (self.__class__.__name__, self._string)
-
-    def __str__(self):
-        return self._string
-
-    @property
-    def is_prerelease(self):
-        raise NotImplementedError('Please implement in subclasses.')
-
-
-class Matcher(object):
-    version_class = None
-
-    # value is either a callable or the name of a method
-    _operators = {
-        '<': lambda v, c, p: v < c,
-        '>': lambda v, c, p: v > c,
-        '<=': lambda v, c, p: v == c or v < c,
-        '>=': lambda v, c, p: v == c or v > c,
-        '==': lambda v, c, p: v == c,
-        '===': lambda v, c, p: v == c,
-        # by default, compatible => >=.
-        '~=': lambda v, c, p: v == c or v > c,
-        '!=': lambda v, c, p: v != c,
-    }
-
-    # this is a method only to support alternative implementations
-    # via overriding
-    def parse_requirement(self, s):
-        return parse_requirement(s)
-
-    def __init__(self, s):
-        if self.version_class is None:
-            raise ValueError('Please specify a version class')
-        self._string = s = s.strip()
-        r = self.parse_requirement(s)
-        if not r:
-            raise ValueError('Not valid: %r' % s)
-        self.name = r.name
-        self.key = self.name.lower()    # for case-insensitive comparisons
-        clist = []
-        if r.constraints:
-            # import pdb; pdb.set_trace()
-            for op, s in r.constraints:
-                if s.endswith('.*'):
-                    if op not in ('==', '!='):
-                        raise ValueError('\'.*\' not allowed for '
-                                         '%r constraints' % op)
-                    # Could be a partial version (e.g. for '2.*') which
-                    # won't parse as a version, so keep it as a string
-                    vn, prefix = s[:-2], True
-                    # Just to check that vn is a valid version
-                    self.version_class(vn)
-                else:
-                    # Should parse as a version, so we can create an
-                    # instance for the comparison
-                    vn, prefix = self.version_class(s), False
-                clist.append((op, vn, prefix))
-        self._parts = tuple(clist)
-
-    def match(self, version):
-        """
-        Check if the provided version matches the constraints.
-
-        :param version: The version to match against this instance.
-        :type version: String or :class:`Version` instance.
-        """
-        if isinstance(version, string_types):
-            version = self.version_class(version)
-        for operator, constraint, prefix in self._parts:
-            f = self._operators.get(operator)
-            if isinstance(f, string_types):
-                f = getattr(self, f)
-            if not f:
-                msg = ('%r not implemented '
-                       'for %s' % (operator, self.__class__.__name__))
-                raise NotImplementedError(msg)
-            if not f(version, constraint, prefix):
-                return False
-        return True
-
-    @property
-    def exact_version(self):
-        result = None
-        if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='):
-            result = self._parts[0][1]
-        return result
-
-    def _check_compatible(self, other):
-        if type(self) != type(other) or self.name != other.name:
-            raise TypeError('cannot compare %s and %s' % (self, other))
-
-    def __eq__(self, other):
-        self._check_compatible(other)
-        return self.key == other.key and self._parts == other._parts
-
-    def __ne__(self, other):
-        return not self.__eq__(other)
-
-    # See http://docs.python.org/reference/datamodel#object.__hash__
-    def __hash__(self):
-        return hash(self.key) + hash(self._parts)
-
-    def __repr__(self):
-        return "%s(%r)" % (self.__class__.__name__, self._string)
-
-    def __str__(self):
-        return self._string
-
-
-PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?'
-                               r'(\.(post)(\d+))?(\.(dev)(\d+))?'
-                               r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$')
-
-
-def _pep_440_key(s):
-    s = s.strip()
-    m = PEP440_VERSION_RE.match(s)
-    if not m:
-        raise UnsupportedVersionError('Not a valid version: %s' % s)
-    groups = m.groups()
-    nums = tuple(int(v) for v in groups[1].split('.'))
-    while len(nums) > 1 and nums[-1] == 0:
-        nums = nums[:-1]
-
-    if not groups[0]:
-        epoch = 0
-    else:
-        epoch = int(groups[0])
-    pre = groups[4:6]
-    post = groups[7:9]
-    dev = groups[10:12]
-    local = groups[13]
-    if pre == (None, None):
-        pre = ()
-    else:
-        pre = pre[0], int(pre[1])
-    if post == (None, None):
-        post = ()
-    else:
-        post = post[0], int(post[1])
-    if dev == (None, None):
-        dev = ()
-    else:
-        dev = dev[0], int(dev[1])
-    if local is None:
-        local = ()
-    else:
-        parts = []
-        for part in local.split('.'):
-            # to ensure that numeric compares as > lexicographic, avoid
-            # comparing them directly, but encode a tuple which ensures
-            # correct sorting
-            if part.isdigit():
-                part = (1, int(part))
-            else:
-                part = (0, part)
-            parts.append(part)
-        local = tuple(parts)
-    if not pre:
-        # either before pre-release, or final release and after
-        if not post and dev:
-            # before pre-release
-            pre = ('a', -1)     # to sort before a0
-        else:
-            pre = ('z',)        # to sort after all pre-releases
-    # now look at the state of post and dev.
-    if not post:
-        post = ('_',)   # sort before 'a'
-    if not dev:
-        dev = ('final',)
-
-    #print('%s -> %s' % (s, m.groups()))
-    return epoch, nums, pre, post, dev, local
-
-
-_normalized_key = _pep_440_key
-
-
-class NormalizedVersion(Version):
-    """A rational version.
-
-    Good:
-        1.2         # equivalent to "1.2.0"
-        1.2.0
-        1.2a1
-        1.2.3a2
-        1.2.3b1
-        1.2.3c1
-        1.2.3.4
-        TODO: fill this out
-
-    Bad:
-        1           # minimum two numbers
-        1.2a        # release level must have a release serial
-        1.2.3b
-    """
-    def parse(self, s):
-        result = _normalized_key(s)
-        # _normalized_key loses trailing zeroes in the release
-        # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0
-        # However, PEP 440 prefix matching needs it: for example,
-        # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0).
-        m = PEP440_VERSION_RE.match(s)      # must succeed
-        groups = m.groups()
-        self._release_clause = tuple(int(v) for v in groups[1].split('.'))
-        return result
-
-    PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev'])
-
-    @property
-    def is_prerelease(self):
-        return any(t[0] in self.PREREL_TAGS for t in self._parts if t)
-
-
-def _match_prefix(x, y):
-    x = str(x)
-    y = str(y)
-    if x == y:
-        return True
-    if not x.startswith(y):
-        return False
-    n = len(y)
-    return x[n] == '.'
-
-
-class NormalizedMatcher(Matcher):
-    version_class = NormalizedVersion
-
-    # value is either a callable or the name of a method
-    _operators = {
-        '~=': '_match_compatible',
-        '<': '_match_lt',
-        '>': '_match_gt',
-        '<=': '_match_le',
-        '>=': '_match_ge',
-        '==': '_match_eq',
-        '===': '_match_arbitrary',
-        '!=': '_match_ne',
-    }
-
-    def _adjust_local(self, version, constraint, prefix):
-        if prefix:
-            strip_local = '+' not in constraint and version._parts[-1]
-        else:
-            # both constraint and version are
-            # NormalizedVersion instances.
-            # If constraint does not have a local component,
-            # ensure the version doesn't, either.
-            strip_local = not constraint._parts[-1] and version._parts[-1]
-        if strip_local:
-            s = version._string.split('+', 1)[0]
-            version = self.version_class(s)
-        return version, constraint
-
-    def _match_lt(self, version, constraint, prefix):
-        version, constraint = self._adjust_local(version, constraint, prefix)
-        if version >= constraint:
-            return False
-        release_clause = constraint._release_clause
-        pfx = '.'.join([str(i) for i in release_clause])
-        return not _match_prefix(version, pfx)
-
-    def _match_gt(self, version, constraint, prefix):
-        version, constraint = self._adjust_local(version, constraint, prefix)
-        if version <= constraint:
-            return False
-        release_clause = constraint._release_clause
-        pfx = '.'.join([str(i) for i in release_clause])
-        return not _match_prefix(version, pfx)
-
-    def _match_le(self, version, constraint, prefix):
-        version, constraint = self._adjust_local(version, constraint, prefix)
-        return version <= constraint
-
-    def _match_ge(self, version, constraint, prefix):
-        version, constraint = self._adjust_local(version, constraint, prefix)
-        return version >= constraint
-
-    def _match_eq(self, version, constraint, prefix):
-        version, constraint = self._adjust_local(version, constraint, prefix)
-        if not prefix:
-            result = (version == constraint)
-        else:
-            result = _match_prefix(version, constraint)
-        return result
-
-    def _match_arbitrary(self, version, constraint, prefix):
-        return str(version) == str(constraint)
-
-    def _match_ne(self, version, constraint, prefix):
-        version, constraint = self._adjust_local(version, constraint, prefix)
-        if not prefix:
-            result = (version != constraint)
-        else:
-            result = not _match_prefix(version, constraint)
-        return result
-
-    def _match_compatible(self, version, constraint, prefix):
-        version, constraint = self._adjust_local(version, constraint, prefix)
-        if version == constraint:
-            return True
-        if version < constraint:
-            return False
-#        if not prefix:
-#            return True
-        release_clause = constraint._release_clause
-        if len(release_clause) > 1:
-            release_clause = release_clause[:-1]
-        pfx = '.'.join([str(i) for i in release_clause])
-        return _match_prefix(version, pfx)
-
-_REPLACEMENTS = (
-    (re.compile('[.+-]$'), ''),                     # remove trailing puncts
-    (re.compile(r'^[.](\d)'), r'0.\1'),             # .N -> 0.N at start
-    (re.compile('^[.-]'), ''),                      # remove leading puncts
-    (re.compile(r'^\((.*)\)$'), r'\1'),             # remove parentheses
-    (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'),    # remove leading v(ersion)
-    (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'),        # remove leading r(ev)
-    (re.compile('[.]{2,}'), '.'),                   # multiple runs of '.'
-    (re.compile(r'\b(alfa|apha)\b'), 'alpha'),      # misspelt alpha
-    (re.compile(r'\b(pre-alpha|prealpha)\b'),
-                'pre.alpha'),                       # standardise
-    (re.compile(r'\(beta\)$'), 'beta'),             # remove parentheses
-)
-
-_SUFFIX_REPLACEMENTS = (
-    (re.compile('^[:~._+-]+'), ''),                   # remove leading puncts
-    (re.compile('[,*")([\\]]'), ''),                  # remove unwanted chars
-    (re.compile('[~:+_ -]'), '.'),                    # replace illegal chars
-    (re.compile('[.]{2,}'), '.'),                   # multiple runs of '.'
-    (re.compile(r'\.$'), ''),                       # trailing '.'
-)
-
-_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)')
-
-
-def _suggest_semantic_version(s):
-    """
-    Try to suggest a semantic form for a version for which
-    _suggest_normalized_version couldn't come up with anything.
-    """
-    result = s.strip().lower()
-    for pat, repl in _REPLACEMENTS:
-        result = pat.sub(repl, result)
-    if not result:
-        result = '0.0.0'
-
-    # Now look for numeric prefix, and separate it out from
-    # the rest.
-    #import pdb; pdb.set_trace()
-    m = _NUMERIC_PREFIX.match(result)
-    if not m:
-        prefix = '0.0.0'
-        suffix = result
-    else:
-        prefix = m.groups()[0].split('.')
-        prefix = [int(i) for i in prefix]
-        while len(prefix) < 3:
-            prefix.append(0)
-        if len(prefix) == 3:
-            suffix = result[m.end():]
-        else:
-            suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():]
-            prefix = prefix[:3]
-        prefix = '.'.join([str(i) for i in prefix])
-        suffix = suffix.strip()
-    if suffix:
-        #import pdb; pdb.set_trace()
-        # massage the suffix.
-        for pat, repl in _SUFFIX_REPLACEMENTS:
-            suffix = pat.sub(repl, suffix)
-
-    if not suffix:
-        result = prefix
-    else:
-        sep = '-' if 'dev' in suffix else '+'
-        result = prefix + sep + suffix
-    if not is_semver(result):
-        result = None
-    return result
-
-
-def _suggest_normalized_version(s):
-    """Suggest a normalized version close to the given version string.
-
-    If you have a version string that isn't rational (i.e. NormalizedVersion
-    doesn't like it) then you might be able to get an equivalent (or close)
-    rational version from this function.
-
-    This does a number of simple normalizations to the given string, based
-    on observation of versions currently in use on PyPI. Given a dump of
-    4287 such versions taken during PyCon 2009:
-    - 2312 (53.93%) match NormalizedVersion without change
-    - 3474 (81.04%) match when using this suggestion method
-
-    @param s {str} An irrational version string.
-    @returns A rational version string, or None if one couldn't be determined.
-    """
-    try:
-        _normalized_key(s)
-        return s   # already rational
-    except UnsupportedVersionError:
-        pass
-
-    rs = s.lower()
-
-    # part of this could use maketrans
-    for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
-                       ('beta', 'b'), ('rc', 'c'), ('-final', ''),
-                       ('-pre', 'c'),
-                       ('-release', ''), ('.release', ''), ('-stable', ''),
-                       ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
-                       ('final', '')):
-        rs = rs.replace(orig, repl)
-
-    # if something ends with dev or pre, we add a 0
-    rs = re.sub(r"pre$", r"pre0", rs)
-    rs = re.sub(r"dev$", r"dev0", rs)
-
-    # if we have something like "b-2" or "a.2" at the end of the
-    # version, that is probably beta, alpha, etc
-    # let's remove the dash or dot
-    rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)
-
-    # 1.0-dev-r371 -> 1.0.dev371
-    # 0.1-dev-r79 -> 0.1.dev79
-    rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)
-
-    # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
-    rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)
-
-    # Clean: v0.3, v1.0
-    if rs.startswith('v'):
-        rs = rs[1:]
-
-    # Clean leading '0's on numbers.
-    #TODO: unintended side-effect on, e.g., "2003.05.09"
-    # PyPI stats: 77 (~2%) better
-    rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)
-
-    # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
-    # zero.
-    # PyPI stats: 245 (7.56%) better
-    rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)
-
-    # the 'dev-rNNN' tag is a dev tag
-    rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)
-
-    # clean the - when used as a pre delimiter
-    rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)
-
-    # a terminal "dev" or "devel" can be changed into ".dev0"
-    rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)
-
-    # a terminal "dev" can be changed into ".dev0"
-    rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)
-
-    # a terminal "final" or "stable" can be removed
-    rs = re.sub(r"(final|stable)$", "", rs)
-
-    # The 'r' and the '-' tags are post release tags
-    #   0.4a1.r10       ->  0.4a1.post10
-    #   0.9.33-17222    ->  0.9.33.post17222
-    #   0.9.33-r17222   ->  0.9.33.post17222
-    rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)
-
-    # Clean 'r' instead of 'dev' usage:
-    #   0.9.33+r17222   ->  0.9.33.dev17222
-    #   1.0dev123       ->  1.0.dev123
-    #   1.0.git123      ->  1.0.dev123
-    #   1.0.bzr123      ->  1.0.dev123
-    #   0.1a0dev.123    ->  0.1a0.dev123
-    # PyPI stats:  ~150 (~4%) better
-    rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)
-
-    # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
-    #   0.2.pre1        ->  0.2c1
-    #   0.2-c1         ->  0.2c1
-    #   1.0preview123   ->  1.0c123
-    # PyPI stats: ~21 (0.62%) better
-    rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)
-
-    # Tcl/Tk uses "px" for their post release markers
-    rs = re.sub(r"p(\d+)$", r".post\1", rs)
-
-    try:
-        _normalized_key(rs)
-    except UnsupportedVersionError:
-        rs = None
-    return rs
-
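A few concrete suggestions produced by the rules above (each follows directly from the substitutions and their comments):

    print(_suggest_normalized_version('1.0-dev-r371'))   # '1.0.dev371'
    print(_suggest_normalized_version('0.9.33-r17222'))  # '0.9.33.post17222'
    print(_suggest_normalized_version('1.0a'))           # '1.0a0'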
-#
-#   Legacy version processing (distribute-compatible)
-#
-
-_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I)
-_VERSION_REPLACE = {
-    'pre': 'c',
-    'preview': 'c',
-    '-': 'final-',
-    'rc': 'c',
-    'dev': '@',
-    '': None,
-    '.': None,
-}
-
-
-def _legacy_key(s):
-    def get_parts(s):
-        result = []
-        for p in _VERSION_PART.split(s.lower()):
-            p = _VERSION_REPLACE.get(p, p)
-            if p:
-                if '0' <= p[:1] <= '9':
-                    p = p.zfill(8)
-                else:
-                    p = '*' + p
-                result.append(p)
-        result.append('*final')
-        return result
-
-    result = []
-    for p in get_parts(s):
-        if p.startswith('*'):
-            if p < '*final':
-                while result and result[-1] == '*final-':
-                    result.pop()
-            while result and result[-1] == '00000000':
-                result.pop()
-        result.append(p)
-    return tuple(result)
-
-
-class LegacyVersion(Version):
-    def parse(self, s):
-        return _legacy_key(s)
-
-    @property
-    def is_prerelease(self):
-        result = False
-        for x in self._parts:
-            if (isinstance(x, string_types) and x.startswith('*') and
-                x < '*final'):
-                result = True
-                break
-        return result
-
-
-class LegacyMatcher(Matcher):
-    version_class = LegacyVersion
-
-    _operators = dict(Matcher._operators)
-    _operators['~='] = '_match_compatible'
-
-    numeric_re = re.compile(r'^(\d+(\.\d+)*)')
-
-    def _match_compatible(self, version, constraint, prefix):
-        if version < constraint:
-            return False
-        m = self.numeric_re.match(str(constraint))
-        if not m:
-            logger.warning('Cannot compute compatible match for version %s '
-                           'and constraint %s', version, constraint)
-            return True
-        s = m.groups()[0]
-        if '.' in s:
-            s = s.rsplit('.', 1)[0]
-        return _match_prefix(version, s)
-
-#
-#   Semantic versioning
-#
-
-_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)'
-                        r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?'
-                        r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I)
-
-
-def is_semver(s):
-    return _SEMVER_RE.match(s)
-
-
-def _semantic_key(s):
-    def make_tuple(s, absent):
-        if s is None:
-            result = (absent,)
-        else:
-            parts = s[1:].split('.')
-            # We can't compare ints and strings on Python 3, so fudge it
-            # by zero-filling numeric values to simulate a numeric comparison
-            result = tuple([p.zfill(8) if p.isdigit() else p for p in parts])
-        return result
-
-    m = is_semver(s)
-    if not m:
-        raise UnsupportedVersionError(s)
-    groups = m.groups()
-    major, minor, patch = [int(i) for i in groups[:3]]
-    # choose the '|' and '*' so that versions sort correctly
-    pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*')
-    return (major, minor, patch), pre, build
-
-
-class SemanticVersion(Version):
-    def parse(self, s):
-        return _semantic_key(s)
-
-    @property
-    def is_prerelease(self):
-        return self._parts[1][0] != '|'
-
-
-class SemanticMatcher(Matcher):
-    version_class = SemanticVersion
-
-
-class VersionScheme(object):
-    def __init__(self, key, matcher, suggester=None):
-        self.key = key
-        self.matcher = matcher
-        self.suggester = suggester
-
-    def is_valid_version(self, s):
-        try:
-            self.matcher.version_class(s)
-            result = True
-        except UnsupportedVersionError:
-            result = False
-        return result
-
-    def is_valid_matcher(self, s):
-        try:
-            self.matcher(s)
-            result = True
-        except UnsupportedVersionError:
-            result = False
-        return result
-
-    def is_valid_constraint_list(self, s):
-        """
-        Used for processing some metadata fields
-        """
-        return self.is_valid_matcher('dummy_name (%s)' % s)
-
-    def suggest(self, s):
-        if self.suggester is None:
-            result = None
-        else:
-            result = self.suggester(s)
-        return result
-
-_SCHEMES = {
-    'normalized': VersionScheme(_normalized_key, NormalizedMatcher,
-                                _suggest_normalized_version),
-    'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda self, s: s),
-    'semantic': VersionScheme(_semantic_key, SemanticMatcher,
-                              _suggest_semantic_version),
-}
-
-_SCHEMES['default'] = _SCHEMES['normalized']
-
-
-def get_scheme(name):
-    if name not in _SCHEMES:
-        raise ValueError('unknown scheme name: %r' % name)
-    return _SCHEMES[name]
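A usage sketch for the version schemes above (the requirement string uses the parenthesised form accepted by distlib's `parse_requirement`; the package name is arbitrary):

    scheme = get_scheme('default')                    # the 'normalized' (PEP 440) scheme
    print(scheme.is_valid_version('1.0.post1'))       # True
    print(scheme.is_valid_version('1.0-dev-r371'))    # False
    print(scheme.suggest('1.0-dev-r371'))             # '1.0.dev371'

    matcher = NormalizedMatcher('requests (>= 2.0, < 3.0)')
    print(matcher.match('2.22.0'), matcher.match('3.1'))   # True False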
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/w32.exe b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/w32.exe
deleted file mode 100644
index 4df7700..0000000
Binary files a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/w32.exe and /dev/null differ
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/w64.exe b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/w64.exe
deleted file mode 100644
index 63ce483..0000000
Binary files a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/w64.exe and /dev/null differ
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/wheel.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/wheel.py
deleted file mode 100644
index 0c8efad..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distlib/wheel.py
+++ /dev/null
@@ -1,1004 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2013-2017 Vinay Sajip.
-# Licensed to the Python Software Foundation under a contributor agreement.
-# See LICENSE.txt and CONTRIBUTORS.txt.
-#
-from __future__ import unicode_literals
-
-import base64
-import codecs
-import datetime
-import distutils.util
-from email import message_from_file
-import hashlib
-import imp
-import json
-import logging
-import os
-import posixpath
-import re
-import shutil
-import sys
-import tempfile
-import zipfile
-
-from . import __version__, DistlibException
-from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
-from .database import InstalledDistribution
-from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME
-from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache,
-                   cached_property, get_cache_base, read_exports, tempdir)
-from .version import NormalizedVersion, UnsupportedVersionError
-
-logger = logging.getLogger(__name__)
-
-cache = None    # created when needed
-
-if hasattr(sys, 'pypy_version_info'):  # pragma: no cover
-    IMP_PREFIX = 'pp'
-elif sys.platform.startswith('java'):  # pragma: no cover
-    IMP_PREFIX = 'jy'
-elif sys.platform == 'cli':  # pragma: no cover
-    IMP_PREFIX = 'ip'
-else:
-    IMP_PREFIX = 'cp'
-
-VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
-if not VER_SUFFIX:   # pragma: no cover
-    VER_SUFFIX = '%s%s' % sys.version_info[:2]
-PYVER = 'py' + VER_SUFFIX
-IMPVER = IMP_PREFIX + VER_SUFFIX
-
-ARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_')
-
-ABI = sysconfig.get_config_var('SOABI')
-if ABI and ABI.startswith('cpython-'):
-    ABI = ABI.replace('cpython-', 'cp')
-else:
-    def _derive_abi():
-        parts = ['cp', VER_SUFFIX]
-        if sysconfig.get_config_var('Py_DEBUG'):
-            parts.append('d')
-        if sysconfig.get_config_var('WITH_PYMALLOC'):
-            parts.append('m')
-        if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4:
-            parts.append('u')
-        return ''.join(parts)
-    ABI = _derive_abi()
-    del _derive_abi
-
-FILENAME_RE = re.compile(r'''
-(?P<nm>[^-]+)
--(?P<vn>\d+[^-]*)
-(-(?P<bn>\d+[^-]*))?
--(?P<py>\w+\d+(\.\w+\d+)*)
--(?P<bi>\w+)
--(?P<ar>\w+(\.\w+)*)
-\.whl$
-''', re.IGNORECASE | re.VERBOSE)
-
-NAME_VERSION_RE = re.compile(r'''
-(?P<nm>[^-]+)
--(?P<vn>\d+[^-]*)
-(-(?P<bn>\d+[^-]*))?$
-''', re.IGNORECASE | re.VERBOSE)
-
-SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
-SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
-SHEBANG_PYTHON = b'#!python'
-SHEBANG_PYTHONW = b'#!pythonw'
-
-if os.sep == '/':
-    to_posix = lambda o: o
-else:
-    to_posix = lambda o: o.replace(os.sep, '/')
-
-
-class Mounter(object):
-    def __init__(self):
-        self.impure_wheels = {}
-        self.libs = {}
-
-    def add(self, pathname, extensions):
-        self.impure_wheels[pathname] = extensions
-        self.libs.update(extensions)
-
-    def remove(self, pathname):
-        extensions = self.impure_wheels.pop(pathname)
-        for k, v in extensions:
-            if k in self.libs:
-                del self.libs[k]
-
-    def find_module(self, fullname, path=None):
-        if fullname in self.libs:
-            result = self
-        else:
-            result = None
-        return result
-
-    def load_module(self, fullname):
-        if fullname in sys.modules:
-            result = sys.modules[fullname]
-        else:
-            if fullname not in self.libs:
-                raise ImportError('unable to find extension for %s' % fullname)
-            result = imp.load_dynamic(fullname, self.libs[fullname])
-            result.__loader__ = self
-            parts = fullname.rsplit('.', 1)
-            if len(parts) > 1:
-                result.__package__ = parts[0]
-        return result
-
-_hook = Mounter()
-
-
-class Wheel(object):
-    """
-    Class to build and install from Wheel files (PEP 427).
-    """
-
-    wheel_version = (1, 1)
-    hash_kind = 'sha256'
-
-    def __init__(self, filename=None, sign=False, verify=False):
-        """
-        Initialise an instance using a (valid) filename.
-        """
-        self.sign = sign
-        self.should_verify = verify
-        self.buildver = ''
-        self.pyver = [PYVER]
-        self.abi = ['none']
-        self.arch = ['any']
-        self.dirname = os.getcwd()
-        if filename is None:
-            self.name = 'dummy'
-            self.version = '0.1'
-            self._filename = self.filename
-        else:
-            m = NAME_VERSION_RE.match(filename)
-            if m:
-                info = m.groupdict('')
-                self.name = info['nm']
-                # Reinstate the local version separator
-                self.version = info['vn'].replace('_', '-')
-                self.buildver = info['bn']
-                self._filename = self.filename
-            else:
-                dirname, filename = os.path.split(filename)
-                m = FILENAME_RE.match(filename)
-                if not m:
-                    raise DistlibException('Invalid name or '
-                                           'filename: %r' % filename)
-                if dirname:
-                    self.dirname = os.path.abspath(dirname)
-                self._filename = filename
-                info = m.groupdict('')
-                self.name = info['nm']
-                self.version = info['vn']
-                self.buildver = info['bn']
-                self.pyver = info['py'].split('.')
-                self.abi = info['bi'].split('.')
-                self.arch = info['ar'].split('.')
-
-    @property
-    def filename(self):
-        """
-        Build and return a filename from the various components.
-        """
-        if self.buildver:
-            buildver = '-' + self.buildver
-        else:
-            buildver = ''
-        pyver = '.'.join(self.pyver)
-        abi = '.'.join(self.abi)
-        arch = '.'.join(self.arch)
-        # replace - with _ as a local version separator
-        version = self.version.replace('-', '_')
-        return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver,
-                                         pyver, abi, arch)
-
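A sketch of the wheel-name round-trip implemented above (the name and version values are illustrative):

    w = Wheel()                                   # defaults: name 'dummy', version '0.1'
    w.name, w.version = 'mega.py', '0.9.17'
    w.pyver, w.abi, w.arch = ['py2', 'py3'], ['none'], ['any']
    print(w.filename)                             # 'mega.py-0.9.17-py2.py3-none-any.whl'

    w2 = Wheel('mega.py-0.9.17-py2.py3-none-any.whl')
    print(w2.name, w2.version, w2.pyver)          # mega.py 0.9.17 ['py2', 'py3']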
-    @property
-    def exists(self):
-        path = os.path.join(self.dirname, self.filename)
-        return os.path.isfile(path)
-
-    @property
-    def tags(self):
-        for pyver in self.pyver:
-            for abi in self.abi:
-                for arch in self.arch:
-                    yield pyver, abi, arch
-
-    @cached_property
-    def metadata(self):
-        pathname = os.path.join(self.dirname, self.filename)
-        name_ver = '%s-%s' % (self.name, self.version)
-        info_dir = '%s.dist-info' % name_ver
-        wrapper = codecs.getreader('utf-8')
-        with ZipFile(pathname, 'r') as zf:
-            wheel_metadata = self.get_wheel_metadata(zf)
-            wv = wheel_metadata['Wheel-Version'].split('.', 1)
-            file_version = tuple([int(i) for i in wv])
-            if file_version < (1, 1):
-                fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME, 'METADATA']
-            else:
-                fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME]
-            result = None
-            for fn in fns:
-                try:
-                    metadata_filename = posixpath.join(info_dir, fn)
-                    with zf.open(metadata_filename) as bf:
-                        wf = wrapper(bf)
-                        result = Metadata(fileobj=wf)
-                        if result:
-                            break
-                except KeyError:
-                    pass
-            if not result:
-                raise ValueError('Invalid wheel, because metadata is '
-                                 'missing: looked in %s' % ', '.join(fns))
-        return result
-
-    def get_wheel_metadata(self, zf):
-        name_ver = '%s-%s' % (self.name, self.version)
-        info_dir = '%s.dist-info' % name_ver
-        metadata_filename = posixpath.join(info_dir, 'WHEEL')
-        with zf.open(metadata_filename) as bf:
-            wf = codecs.getreader('utf-8')(bf)
-            message = message_from_file(wf)
-        return dict(message)
-
-    @cached_property
-    def info(self):
-        pathname = os.path.join(self.dirname, self.filename)
-        with ZipFile(pathname, 'r') as zf:
-            result = self.get_wheel_metadata(zf)
-        return result
-
-    def process_shebang(self, data):
-        m = SHEBANG_RE.match(data)
-        if m:
-            end = m.end()
-            shebang, data_after_shebang = data[:end], data[end:]
-            # Preserve any arguments after the interpreter
-            if b'pythonw' in shebang.lower():
-                shebang_python = SHEBANG_PYTHONW
-            else:
-                shebang_python = SHEBANG_PYTHON
-            m = SHEBANG_DETAIL_RE.match(shebang)
-            if m:
-                args = b' ' + m.groups()[-1]
-            else:
-                args = b''
-            shebang = shebang_python + args
-            data = shebang + data_after_shebang
-        else:
-            cr = data.find(b'\r')
-            lf = data.find(b'\n')
-            if cr < 0 or cr > lf:
-                term = b'\n'
-            else:
-                if data[cr:cr + 2] == b'\r\n':
-                    term = b'\r\n'
-                else:
-                    term = b'\r'
-            data = SHEBANG_PYTHON + term + data
-        return data
-
-    def get_hash(self, data, hash_kind=None):
-        if hash_kind is None:
-            hash_kind = self.hash_kind
-        try:
-            hasher = getattr(hashlib, hash_kind)
-        except AttributeError:
-            raise DistlibException('Unsupported hash algorithm: %r' % hash_kind)
-        result = hasher(data).digest()
-        result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii')
-        return hash_kind, result
-
-    def write_record(self, records, record_path, base):
-        records = list(records) # make a copy for sorting
-        p = to_posix(os.path.relpath(record_path, base))
-        records.append((p, '', ''))
-        records.sort()
-        with CSVWriter(record_path) as writer:
-            for row in records:
-                writer.writerow(row)
-
-    def write_records(self, info, libdir, archive_paths):
-        records = []
-        distinfo, info_dir = info
-        hasher = getattr(hashlib, self.hash_kind)
-        for ap, p in archive_paths:
-            with open(p, 'rb') as f:
-                data = f.read()
-            digest = '%s=%s' % self.get_hash(data)
-            size = os.path.getsize(p)
-            records.append((ap, digest, size))
-
-        p = os.path.join(distinfo, 'RECORD')
-        self.write_record(records, p, libdir)
-        ap = to_posix(os.path.join(info_dir, 'RECORD'))
-        archive_paths.append((ap, p))
-
-    def build_zip(self, pathname, archive_paths):
-        with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf:
-            for ap, p in archive_paths:
-                logger.debug('Wrote %s to %s in wheel', p, ap)
-                zf.write(p, ap)
-
-    def build(self, paths, tags=None, wheel_version=None):
-        """
-        Build a wheel from files in specified paths, and use any specified tags
-        when determining the name of the wheel.
-        """
-        if tags is None:
-            tags = {}
-
-        libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]
-        if libkey == 'platlib':
-            is_pure = 'false'
-            default_pyver = [IMPVER]
-            default_abi = [ABI]
-            default_arch = [ARCH]
-        else:
-            is_pure = 'true'
-            default_pyver = [PYVER]
-            default_abi = ['none']
-            default_arch = ['any']
-
-        self.pyver = tags.get('pyver', default_pyver)
-        self.abi = tags.get('abi', default_abi)
-        self.arch = tags.get('arch', default_arch)
-
-        libdir = paths[libkey]
-
-        name_ver = '%s-%s' % (self.name, self.version)
-        data_dir = '%s.data' % name_ver
-        info_dir = '%s.dist-info' % name_ver
-
-        archive_paths = []
-
-        # First, stuff which is not in site-packages
-        for key in ('data', 'headers', 'scripts'):
-            if key not in paths:
-                continue
-            path = paths[key]
-            if os.path.isdir(path):
-                for root, dirs, files in os.walk(path):
-                    for fn in files:
-                        p = fsdecode(os.path.join(root, fn))
-                        rp = os.path.relpath(p, path)
-                        ap = to_posix(os.path.join(data_dir, key, rp))
-                        archive_paths.append((ap, p))
-                        if key == 'scripts' and not p.endswith('.exe'):
-                            with open(p, 'rb') as f:
-                                data = f.read()
-                            data = self.process_shebang(data)
-                            with open(p, 'wb') as f:
-                                f.write(data)
-
-        # Now, stuff which is in site-packages, other than the
-        # distinfo stuff.
-        path = libdir
-        distinfo = None
-        for root, dirs, files in os.walk(path):
-            if root == path:
-                # At the top level only, save distinfo for later
-                # and skip it for now
-                for i, dn in enumerate(dirs):
-                    dn = fsdecode(dn)
-                    if dn.endswith('.dist-info'):
-                        distinfo = os.path.join(root, dn)
-                        del dirs[i]
-                        break
-                assert distinfo, '.dist-info directory expected, not found'
-
-            for fn in files:
-                # comment out next suite to leave .pyc files in
-                if fsdecode(fn).endswith(('.pyc', '.pyo')):
-                    continue
-                p = os.path.join(root, fn)
-                rp = to_posix(os.path.relpath(p, path))
-                archive_paths.append((rp, p))
-
-        # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
-        files = os.listdir(distinfo)
-        for fn in files:
-            if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
-                p = fsdecode(os.path.join(distinfo, fn))
-                ap = to_posix(os.path.join(info_dir, fn))
-                archive_paths.append((ap, p))
-
-        wheel_metadata = [
-            'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
-            'Generator: distlib %s' % __version__,
-            'Root-Is-Purelib: %s' % is_pure,
-        ]
-        for pyver, abi, arch in self.tags:
-            wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
-        p = os.path.join(distinfo, 'WHEEL')
-        with open(p, 'w') as f:
-            f.write('\n'.join(wheel_metadata))
-        ap = to_posix(os.path.join(info_dir, 'WHEEL'))
-        archive_paths.append((ap, p))
-
-        # Now, at last, RECORD.
-        # Paths in here are archive paths - nothing else makes sense.
-        self.write_records((distinfo, info_dir), libdir, archive_paths)
-        # Now, ready to build the zip file
-        pathname = os.path.join(self.dirname, self.filename)
-        self.build_zip(pathname, archive_paths)
-        return pathname
-
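
To make the build() flow above concrete, here is a minimal, hypothetical
sketch of driving it through distlib's public Wheel class. The name, version
and paths are assumptions, and the purelib directory must already contain a
*.dist-info/ directory with the distribution metadata, as the assert above
requires:

from distlib.wheel import Wheel

w = Wheel()
w.name = 'demo'               # hypothetical distribution name
w.version = '0.1.0'
w.dirname = '/tmp/dist'       # where the finished .whl is written

# 'purelib' (or 'platlib') selects the library root that is archived;
# it must contain demo-0.1.0.dist-info/ with the distribution metadata.
paths = {'purelib': '/tmp/build/lib'}
print(w.build(paths))         # e.g. /tmp/dist/demo-0.1.0-py37-none-any.whl
                              # (the tag depends on the running interpreter)
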
-    def skip_entry(self, arcname):
-        """
-        Determine whether an archive entry should be skipped when verifying
-        or installing.
-        """
-        # The signature file won't be in RECORD,
-        # and we don't currently do anything with it.
-        # We also skip directories, as they won't be in RECORD
-        # either. See:
-        #
-        # https://github.com/pypa/wheel/issues/294
-        # https://github.com/pypa/wheel/issues/287
-        # https://github.com/pypa/wheel/pull/289
-        #
-        return arcname.endswith(('/', '/RECORD.jws'))
-
-    def install(self, paths, maker, **kwargs):
-        """
-        Install a wheel to the specified paths. If kwarg ``warner`` is
-        specified, it should be a callable, which will be called with two
-        tuples indicating the wheel version of this software and the wheel
-        version in the file, if there is a discrepancy in the versions.
-        This can be used to issue any warnings or to raise any exceptions.
-        If kwarg ``lib_only`` is True, only the purelib/platlib files are
-        installed, and the headers, scripts, data and dist-info metadata are
-        not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
-        bytecode will try to use file-hash based invalidation (PEP-552) on
-        supported interpreter versions (CPython 3.7+).
-
-        The return value is an :class:`InstalledDistribution` instance unless
-        ``lib_only`` is True, in which case the return value is ``None``.
-        """
-
-        dry_run = maker.dry_run
-        warner = kwargs.get('warner')
-        lib_only = kwargs.get('lib_only', False)
-        bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False)
-
-        pathname = os.path.join(self.dirname, self.filename)
-        name_ver = '%s-%s' % (self.name, self.version)
-        data_dir = '%s.data' % name_ver
-        info_dir = '%s.dist-info' % name_ver
-
-        metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
-        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
-        record_name = posixpath.join(info_dir, 'RECORD')
-
-        wrapper = codecs.getreader('utf-8')
-
-        with ZipFile(pathname, 'r') as zf:
-            with zf.open(wheel_metadata_name) as bwf:
-                wf = wrapper(bwf)
-                message = message_from_file(wf)
-            wv = message['Wheel-Version'].split('.', 1)
-            file_version = tuple([int(i) for i in wv])
-            if (file_version != self.wheel_version) and warner:
-                warner(self.wheel_version, file_version)
-
-            if message['Root-Is-Purelib'] == 'true':
-                libdir = paths['purelib']
-            else:
-                libdir = paths['platlib']
-
-            records = {}
-            with zf.open(record_name) as bf:
-                with CSVReader(stream=bf) as reader:
-                    for row in reader:
-                        p = row[0]
-                        records[p] = row
-
-            data_pfx = posixpath.join(data_dir, '')
-            info_pfx = posixpath.join(info_dir, '')
-            script_pfx = posixpath.join(data_dir, 'scripts', '')
-
-            # make a new instance rather than a copy of maker's,
-            # as we mutate it
-            fileop = FileOperator(dry_run=dry_run)
-            fileop.record = True    # so we can rollback if needed
-
-            bc = not sys.dont_write_bytecode    # Double negatives. Lovely!
-
-            outfiles = []   # for RECORD writing
-
-            # for script copying/shebang processing
-            workdir = tempfile.mkdtemp()
-            # set target dir later
-            # we default add_launchers to False, as the
-            # Python Launcher should be used instead
-            maker.source_dir = workdir
-            maker.target_dir = None
-            try:
-                for zinfo in zf.infolist():
-                    arcname = zinfo.filename
-                    if isinstance(arcname, text_type):
-                        u_arcname = arcname
-                    else:
-                        u_arcname = arcname.decode('utf-8')
-                    if self.skip_entry(u_arcname):
-                        continue
-                    row = records[u_arcname]
-                    if row[2] and str(zinfo.file_size) != row[2]:
-                        raise DistlibException('size mismatch for '
-                                               '%s' % u_arcname)
-                    if row[1]:
-                        kind, value = row[1].split('=', 1)
-                        with zf.open(arcname) as bf:
-                            data = bf.read()
-                        _, digest = self.get_hash(data, kind)
-                        if digest != value:
-                            raise DistlibException('digest mismatch for '
-                                                   '%s' % arcname)
-
-                    if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
-                        logger.debug('lib_only: skipping %s', u_arcname)
-                        continue
-                    is_script = (u_arcname.startswith(script_pfx)
-                                 and not u_arcname.endswith('.exe'))
-
-                    if u_arcname.startswith(data_pfx):
-                        _, where, rp = u_arcname.split('/', 2)
-                        outfile = os.path.join(paths[where], convert_path(rp))
-                    else:
-                        # meant for site-packages.
-                        if u_arcname in (wheel_metadata_name, record_name):
-                            continue
-                        outfile = os.path.join(libdir, convert_path(u_arcname))
-                    if not is_script:
-                        with zf.open(arcname) as bf:
-                            fileop.copy_stream(bf, outfile)
-                        outfiles.append(outfile)
-                        # Double check the digest of the written file
-                        if not dry_run and row[1]:
-                            with open(outfile, 'rb') as bf:
-                                data = bf.read()
-                                _, newdigest = self.get_hash(data, kind)
-                                if newdigest != digest:
-                                    raise DistlibException('digest mismatch '
-                                                           'on write for '
-                                                           '%s' % outfile)
-                        if bc and outfile.endswith('.py'):
-                            try:
-                                pyc = fileop.byte_compile(outfile,
-                                                          hashed_invalidation=bc_hashed_invalidation)
-                                outfiles.append(pyc)
-                            except Exception:
-                                # Don't give up if byte-compilation fails,
-                                # but log it and perhaps warn the user
-                                logger.warning('Byte-compilation failed',
-                                               exc_info=True)
-                    else:
-                        fn = os.path.basename(convert_path(arcname))
-                        workname = os.path.join(workdir, fn)
-                        with zf.open(arcname) as bf:
-                            fileop.copy_stream(bf, workname)
-
-                        dn, fn = os.path.split(outfile)
-                        maker.target_dir = dn
-                        filenames = maker.make(fn)
-                        fileop.set_executable_mode(filenames)
-                        outfiles.extend(filenames)
-
-                if lib_only:
-                    logger.debug('lib_only: returning None')
-                    dist = None
-                else:
-                    # Generate scripts
-
-                    # Try to get pydist.json so we can see if there are
-                    # any commands to generate. If this fails (e.g. because
-                    # of a legacy wheel), log a warning but don't give up.
-                    commands = None
-                    file_version = self.info['Wheel-Version']
-                    if file_version == '1.0':
-                        # Use legacy info
-                        ep = posixpath.join(info_dir, 'entry_points.txt')
-                        try:
-                            with zf.open(ep) as bwf:
-                                epdata = read_exports(bwf)
-                            commands = {}
-                            for key in ('console', 'gui'):
-                                k = '%s_scripts' % key
-                                if k in epdata:
-                                    commands['wrap_%s' % key] = d = {}
-                                    for v in epdata[k].values():
-                                        s = '%s:%s' % (v.prefix, v.suffix)
-                                        if v.flags:
-                                            s += ' %s' % v.flags
-                                        d[v.name] = s
-                        except Exception:
-                            logger.warning('Unable to read legacy script '
-                                           'metadata, so cannot generate '
-                                           'scripts')
-                    else:
-                        try:
-                            with zf.open(metadata_name) as bwf:
-                                wf = wrapper(bwf)
-                                commands = json.load(wf).get('extensions')
-                                if commands:
-                                    commands = commands.get('python.commands')
-                        except Exception:
-                            logger.warning('Unable to read JSON metadata, so '
-                                           'cannot generate scripts')
-                    if commands:
-                        console_scripts = commands.get('wrap_console', {})
-                        gui_scripts = commands.get('wrap_gui', {})
-                        if console_scripts or gui_scripts:
-                            script_dir = paths.get('scripts', '')
-                            if not os.path.isdir(script_dir):
-                                raise ValueError('Valid script path not '
-                                                 'specified')
-                            maker.target_dir = script_dir
-                            for k, v in console_scripts.items():
-                                script = '%s = %s' % (k, v)
-                                filenames = maker.make(script)
-                                fileop.set_executable_mode(filenames)
-
-                            if gui_scripts:
-                                options = {'gui': True }
-                                for k, v in gui_scripts.items():
-                                    script = '%s = %s' % (k, v)
-                                    filenames = maker.make(script, options)
-                                    fileop.set_executable_mode(filenames)
-
-                    p = os.path.join(libdir, info_dir)
-                    dist = InstalledDistribution(p)
-
-                    # Write SHARED
-                    paths = dict(paths)     # don't change passed in dict
-                    del paths['purelib']
-                    del paths['platlib']
-                    paths['lib'] = libdir
-                    p = dist.write_shared_locations(paths, dry_run)
-                    if p:
-                        outfiles.append(p)
-
-                    # Write RECORD
-                    dist.write_installed_files(outfiles, paths['prefix'],
-                                               dry_run)
-                return dist
-            except Exception:  # pragma: no cover
-                logger.exception('installation failed.')
-                fileop.rollback()
-                raise
-            finally:
-                shutil.rmtree(workdir)
-
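
A hedged sketch of how install() above might be called, using distlib's
ScriptMaker for the ``maker`` argument. All paths are assumptions; the keys
mirror the ones the method looks up:

from distlib.scripts import ScriptMaker
from distlib.wheel import Wheel

paths = {
    'prefix':  '/tmp/venv',
    'purelib': '/tmp/venv/lib/python3.7/site-packages',
    'platlib': '/tmp/venv/lib/python3.7/site-packages',
    'scripts': '/tmp/venv/bin',
    'headers': '/tmp/venv/include',
    'data':    '/tmp/venv/data',
}
# install() manages source_dir/target_dir itself; add_launchers=False matches
# the comment above about preferring the Python Launcher on Windows.
maker = ScriptMaker(None, None, add_launchers=False)
dist = Wheel('/tmp/dist/demo-0.1.0-py37-none-any.whl').install(paths, maker)
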
-    def _get_dylib_cache(self):
-        global cache
-        if cache is None:
-            # Use native string to avoid issues on 2.x: see Python #20140.
-            base = os.path.join(get_cache_base(), str('dylib-cache'),
-                                '%s.%s' % sys.version_info[:2])
-            cache = Cache(base)
-        return cache
-
-    def _get_extensions(self):
-        pathname = os.path.join(self.dirname, self.filename)
-        name_ver = '%s-%s' % (self.name, self.version)
-        info_dir = '%s.dist-info' % name_ver
-        arcname = posixpath.join(info_dir, 'EXTENSIONS')
-        wrapper = codecs.getreader('utf-8')
-        result = []
-        with ZipFile(pathname, 'r') as zf:
-            try:
-                with zf.open(arcname) as bf:
-                    wf = wrapper(bf)
-                    extensions = json.load(wf)
-                    cache = self._get_dylib_cache()
-                    prefix = cache.prefix_to_dir(pathname)
-                    cache_base = os.path.join(cache.base, prefix)
-                    if not os.path.isdir(cache_base):
-                        os.makedirs(cache_base)
-                    for name, relpath in extensions.items():
-                        dest = os.path.join(cache_base, convert_path(relpath))
-                        if not os.path.exists(dest):
-                            extract = True
-                        else:
-                            file_time = os.stat(dest).st_mtime
-                            file_time = datetime.datetime.fromtimestamp(file_time)
-                            info = zf.getinfo(relpath)
-                            wheel_time = datetime.datetime(*info.date_time)
-                            extract = wheel_time > file_time
-                        if extract:
-                            zf.extract(relpath, cache_base)
-                        result.append((name, dest))
-            except KeyError:
-                pass
-        return result
-
-    def is_compatible(self):
-        """
-        Determine if a wheel is compatible with the running system.
-        """
-        return is_compatible(self)
-
-    def is_mountable(self):
-        """
-        Determine if a wheel is asserted as mountable by its metadata.
-        """
-        return True # for now - metadata details TBD
-
-    def mount(self, append=False):
-        pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
-        if not self.is_compatible():
-            msg = 'Wheel %s not compatible with this Python.' % pathname
-            raise DistlibException(msg)
-        if not self.is_mountable():
-            msg = 'Wheel %s is marked as not mountable.' % pathname
-            raise DistlibException(msg)
-        if pathname in sys.path:
-            logger.debug('%s already in path', pathname)
-        else:
-            if append:
-                sys.path.append(pathname)
-            else:
-                sys.path.insert(0, pathname)
-            extensions = self._get_extensions()
-            if extensions:
-                if _hook not in sys.meta_path:
-                    sys.meta_path.append(_hook)
-                _hook.add(pathname, extensions)
-
-    def unmount(self):
-        pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
-        if pathname not in sys.path:
-            logger.debug('%s not in path', pathname)
-        else:
-            sys.path.remove(pathname)
-            if pathname in _hook.impure_wheels:
-                _hook.remove(pathname)
-            if not _hook.impure_wheels:
-                if _hook in sys.meta_path:
-                    sys.meta_path.remove(_hook)
-
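
The mount()/unmount() pair above effectively lets you import from a wheel in
place. A small illustrative sketch; the wheel path and package name are
hypothetical:

from distlib.wheel import Wheel

w = Wheel('/tmp/dist/demo-0.1.0-py37-none-any.whl')
if w.is_compatible() and w.is_mountable():
    w.mount()        # adds the .whl to sys.path (plus a finder for any
                     # native extensions listed in EXTENSIONS)
    import demo      # importable straight from the mounted wheel
    w.unmount()      # removes it from sys.path again
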
-    def verify(self):
-        pathname = os.path.join(self.dirname, self.filename)
-        name_ver = '%s-%s' % (self.name, self.version)
-        data_dir = '%s.data' % name_ver
-        info_dir = '%s.dist-info' % name_ver
-
-        metadata_name = posixpath.join(info_dir, METADATA_FILENAME)
-        wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
-        record_name = posixpath.join(info_dir, 'RECORD')
-
-        wrapper = codecs.getreader('utf-8')
-
-        with ZipFile(pathname, 'r') as zf:
-            with zf.open(wheel_metadata_name) as bwf:
-                wf = wrapper(bwf)
-                message = message_from_file(wf)
-            wv = message['Wheel-Version'].split('.', 1)
-            file_version = tuple([int(i) for i in wv])
-            # TODO version verification
-
-            records = {}
-            with zf.open(record_name) as bf:
-                with CSVReader(stream=bf) as reader:
-                    for row in reader:
-                        p = row[0]
-                        records[p] = row
-
-            for zinfo in zf.infolist():
-                arcname = zinfo.filename
-                if isinstance(arcname, text_type):
-                    u_arcname = arcname
-                else:
-                    u_arcname = arcname.decode('utf-8')
-                # See issue #115: some wheels have '..' in their entries, but
-                # only in the filename (e.g. __main__..py), so the check
-                # looks for '..' in the directory portions only.
-                p = u_arcname.split('/')
-                if '..' in p:
-                    raise DistlibException('invalid entry in '
-                                           'wheel: %r' % u_arcname)
-
-                if self.skip_entry(u_arcname):
-                    continue
-                row = records[u_arcname]
-                if row[2] and str(zinfo.file_size) != row[2]:
-                    raise DistlibException('size mismatch for '
-                                           '%s' % u_arcname)
-                if row[1]:
-                    kind, value = row[1].split('=', 1)
-                    with zf.open(arcname) as bf:
-                        data = bf.read()
-                    _, digest = self.get_hash(data, kind)
-                    if digest != value:
-                        raise DistlibException('digest mismatch for '
-                                               '%s' % arcname)
-
-    def update(self, modifier, dest_dir=None, **kwargs):
-        """
-        Update the contents of a wheel in a generic way. The modifier should
-        be a callable which expects a dictionary argument: its keys are
-        archive-entry paths, and its values are absolute filesystem paths
-        where the contents of the corresponding archive entries can be found. The
-        modifier is free to change the contents of the files pointed to, add
-        new entries and remove entries, before returning. This method will
-        extract the entire contents of the wheel to a temporary location, call
-        the modifier, and then use the passed (and possibly updated)
-        dictionary to write a new wheel. If ``dest_dir`` is specified, the new
-        wheel is written there -- otherwise, the original wheel is overwritten.
-
-        The modifier should return True if it updated the wheel, else False.
-        This method returns the same value the modifier returns.
-        """
-
-        def get_version(path_map, info_dir):
-            version = path = None
-            key = '%s/%s' % (info_dir, METADATA_FILENAME)
-            if key not in path_map:
-                key = '%s/PKG-INFO' % info_dir
-            if key in path_map:
-                path = path_map[key]
-                version = Metadata(path=path).version
-            return version, path
-
-        def update_version(version, path):
-            updated = None
-            try:
-                v = NormalizedVersion(version)
-                i = version.find('-')
-                if i < 0:
-                    updated = '%s+1' % version
-                else:
-                    parts = [int(s) for s in version[i + 1:].split('.')]
-                    parts[-1] += 1
-                    updated = '%s+%s' % (version[:i],
-                                         '.'.join(str(i) for i in parts))
-            except UnsupportedVersionError:
-                logger.debug('Cannot update non-compliant (PEP-440) '
-                             'version %r', version)
-            if updated:
-                md = Metadata(path=path)
-                md.version = updated
-                legacy = not path.endswith(METADATA_FILENAME)
-                md.write(path=path, legacy=legacy)
-                logger.debug('Version updated from %r to %r', version,
-                             updated)
-
-        pathname = os.path.join(self.dirname, self.filename)
-        name_ver = '%s-%s' % (self.name, self.version)
-        info_dir = '%s.dist-info' % name_ver
-        record_name = posixpath.join(info_dir, 'RECORD')
-        with tempdir() as workdir:
-            with ZipFile(pathname, 'r') as zf:
-                path_map = {}
-                for zinfo in zf.infolist():
-                    arcname = zinfo.filename
-                    if isinstance(arcname, text_type):
-                        u_arcname = arcname
-                    else:
-                        u_arcname = arcname.decode('utf-8')
-                    if u_arcname == record_name:
-                        continue
-                    if '..' in u_arcname:
-                        raise DistlibException('invalid entry in '
-                                               'wheel: %r' % u_arcname)
-                    zf.extract(zinfo, workdir)
-                    path = os.path.join(workdir, convert_path(u_arcname))
-                    path_map[u_arcname] = path
-
-            # Remember the version.
-            original_version, _ = get_version(path_map, info_dir)
-            # Files extracted. Call the modifier.
-            modified = modifier(path_map, **kwargs)
-            if modified:
-                # Something changed - need to build a new wheel.
-                current_version, path = get_version(path_map, info_dir)
-                if current_version and (current_version == original_version):
-                    # Add or update local version to signify changes.
-                    update_version(current_version, path)
-                # Decide where the new wheel goes.
-                if dest_dir is None:
-                    fd, newpath = tempfile.mkstemp(suffix='.whl',
-                                                   prefix='wheel-update-',
-                                                   dir=workdir)
-                    os.close(fd)
-                else:
-                    if not os.path.isdir(dest_dir):
-                        raise DistlibException('Not a directory: %r' % dest_dir)
-                    newpath = os.path.join(dest_dir, self.filename)
-                archive_paths = list(path_map.items())
-                distinfo = os.path.join(workdir, info_dir)
-                info = distinfo, info_dir
-                self.write_records(info, workdir, archive_paths)
-                self.build_zip(newpath, archive_paths)
-                if dest_dir is None:
-                    shutil.copyfile(newpath, pathname)
-        return modified
-
-def compatible_tags():
-    """
-    Return (pyver, abi, arch) tuples compatible with this Python.
-    """
-    versions = [VER_SUFFIX]
-    major = VER_SUFFIX[0]
-    for minor in range(sys.version_info[1] - 1, - 1, -1):
-        versions.append(''.join([major, str(minor)]))
-
-    abis = []
-    for suffix, _, _ in imp.get_suffixes():
-        if suffix.startswith('.abi'):
-            abis.append(suffix.split('.', 2)[1])
-    abis.sort()
-    if ABI != 'none':
-        abis.insert(0, ABI)
-    abis.append('none')
-    result = []
-
-    arches = [ARCH]
-    if sys.platform == 'darwin':
-        m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
-        if m:
-            name, major, minor, arch = m.groups()
-            minor = int(minor)
-            matches = [arch]
-            if arch in ('i386', 'ppc'):
-                matches.append('fat')
-            if arch in ('i386', 'ppc', 'x86_64'):
-                matches.append('fat3')
-            if arch in ('ppc64', 'x86_64'):
-                matches.append('fat64')
-            if arch in ('i386', 'x86_64'):
-                matches.append('intel')
-            if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
-                matches.append('universal')
-            while minor >= 0:
-                for match in matches:
-                    s = '%s_%s_%s_%s' % (name, major, minor, match)
-                    if s != ARCH:   # already there
-                        arches.append(s)
-                minor -= 1
-
-    # Most specific - our Python version, ABI and arch
-    for abi in abis:
-        for arch in arches:
-            result.append((''.join((IMP_PREFIX, versions[0])), abi, arch))
-
-    # where no ABI / arch dependency, but IMP_PREFIX dependency
-    for i, version in enumerate(versions):
-        result.append((''.join((IMP_PREFIX, version)), 'none', 'any'))
-        if i == 0:
-            result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any'))
-
-    # no IMP_PREFIX, ABI or arch dependency
-    for i, version in enumerate(versions):
-        result.append((''.join(('py', version)), 'none', 'any'))
-        if i == 0:
-            result.append((''.join(('py', version[0])), 'none', 'any'))
-    return set(result)
-
-
-COMPATIBLE_TAGS = compatible_tags()
-
-del compatible_tags
-
-
-def is_compatible(wheel, tags=None):
-    if not isinstance(wheel, Wheel):
-        wheel = Wheel(wheel)    # assume it's a filename
-    result = False
-    if tags is None:
-        tags = COMPATIBLE_TAGS
-    for ver, abi, arch in tags:
-        if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch:
-            result = True
-            break
-    return result
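
For reference, compatibility is simply membership of one of the wheel's
(pyver, abi, arch) combinations in COMPATIBLE_TAGS; for example (the filename
is hypothetical):

from distlib.wheel import is_compatible

# 'py3-none-any' matches the ('py3', 'none', 'any') tag generated above,
# so this returns True on any CPython 3.x interpreter.
print(is_compatible('demo-0.1.0-py3-none-any.whl'))
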
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distro.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distro.py
deleted file mode 100644
index 3306163..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/distro.py
+++ /dev/null
@@ -1,1216 +0,0 @@
-# Copyright 2015,2016,2017 Nir Cohen
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-The ``distro`` package (``distro`` stands for Linux Distribution) provides
-information about the Linux distribution it runs on, such as a reliable
-machine-readable distro ID, or version information.
-
-It is the recommended replacement for Python's original
-:py:func:`platform.linux_distribution` function, but it provides much more
-functionality. An alternative implementation became necessary because Python
-3.5 deprecated this function, and Python 3.8 will remove it altogether.
-Its predecessor function :py:func:`platform.dist` had already been
-deprecated since Python 2.6 and will also be removed in Python 3.8.
-Still, there are many cases in which access to OS distribution information
-is needed. See `Python issue 1322 <https://bugs.python.org/issue1322>`_ for
-more information.
-"""
-
-import os
-import re
-import sys
-import json
-import shlex
-import logging
-import argparse
-import subprocess
-
-
-_UNIXCONFDIR = os.environ.get('UNIXCONFDIR', '/etc')
-_OS_RELEASE_BASENAME = 'os-release'
-
-#: Translation table for normalizing the "ID" attribute defined in os-release
-#: files, for use by the :func:`distro.id` method.
-#:
-#: * Key: Value as defined in the os-release file, translated to lower case,
-#:   with blanks translated to underscores.
-#:
-#: * Value: Normalized value.
-NORMALIZED_OS_ID = {
-    'ol': 'oracle',  # Oracle Enterprise Linux
-}
-
-#: Translation table for normalizing the "Distributor ID" attribute returned by
-#: the lsb_release command, for use by the :func:`distro.id` method.
-#:
-#: * Key: Value as returned by the lsb_release command, translated to lower
-#:   case, with blanks translated to underscores.
-#:
-#: * Value: Normalized value.
-NORMALIZED_LSB_ID = {
-    'enterpriseenterprise': 'oracle',  # Oracle Enterprise Linux
-    'redhatenterpriseworkstation': 'rhel',  # RHEL 6, 7 Workstation
-    'redhatenterpriseserver': 'rhel',  # RHEL 6, 7 Server
-}
-
-#: Translation table for normalizing the distro ID derived from the file name
-#: of distro release files, for use by the :func:`distro.id` method.
-#:
-#: * Key: Value as derived from the file name of a distro release file,
-#:   translated to lower case, with blanks translated to underscores.
-#:
-#: * Value: Normalized value.
-NORMALIZED_DISTRO_ID = {
-    'redhat': 'rhel',  # RHEL 6.x, 7.x
-}
-
-# Pattern for content of distro release file (reversed)
-_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile(
-    r'(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)')
-
-# Pattern for base file name of distro release file
-_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(
-    r'(\w+)[-_](release|version)$')
-
-# Base file names to be ignored when searching for distro release file
-_DISTRO_RELEASE_IGNORE_BASENAMES = (
-    'debian_version',
-    'lsb-release',
-    'oem-release',
-    _OS_RELEASE_BASENAME,
-    'system-release'
-)
-
-
-def linux_distribution(full_distribution_name=True):
-    """
-    Return information about the current OS distribution as a tuple
-    ``(id_name, version, codename)`` with items as follows:
-
-    * ``id_name``:  If *full_distribution_name* is false, the result of
-      :func:`distro.id`. Otherwise, the result of :func:`distro.name`.
-
-    * ``version``:  The result of :func:`distro.version`.
-
-    * ``codename``:  The result of :func:`distro.codename`.
-
-    The interface of this function is compatible with the original
-    :py:func:`platform.linux_distribution` function, supporting a subset of
-    its parameters.
-
-    The data it returns may not exactly be the same, because it uses more data
-    sources than the original function, and that may lead to different data if
-    the OS distribution is not consistent across multiple data sources it
-    provides (there are indeed such distributions ...).
-
-    Another reason for differences is the fact that the :func:`distro.id`
-    method normalizes the distro ID string to a reliable machine-readable value
-    for a number of popular OS distributions.
-    """
-    return _distro.linux_distribution(full_distribution_name)
-
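
Assuming the standalone ``distro`` package (rather than this vendored copy) is
importable, typical use of the module-level accessors looks like the
following; the output shown is only an example and depends on the host system:

import distro

print(distro.linux_distribution(full_distribution_name=False))
# e.g. ('debian', '9', 'stretch')
print(distro.id(), distro.version(best=True), distro.codename())
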
-
-def id():
-    """
-    Return the distro ID of the current distribution, as a
-    machine-readable string.
-
-    For a number of OS distributions, the returned distro ID value is
-    *reliable*, in the sense that it is documented and that it does not change
-    across releases of the distribution.
-
-    This package maintains the following reliable distro ID values:
-
-    ==============  =========================================
-    Distro ID       Distribution
-    ==============  =========================================
-    "ubuntu"        Ubuntu
-    "debian"        Debian
-    "rhel"          RedHat Enterprise Linux
-    "centos"        CentOS
-    "fedora"        Fedora
-    "sles"          SUSE Linux Enterprise Server
-    "opensuse"      openSUSE
-    "amazon"        Amazon Linux
-    "arch"          Arch Linux
-    "cloudlinux"    CloudLinux OS
-    "exherbo"       Exherbo Linux
-    "gentoo"        GenToo Linux
-    "ibm_powerkvm"  IBM PowerKVM
-    "kvmibm"        KVM for IBM z Systems
-    "linuxmint"     Linux Mint
-    "mageia"        Mageia
-    "mandriva"      Mandriva Linux
-    "parallels"     Parallels
-    "pidora"        Pidora
-    "raspbian"      Raspbian
-    "oracle"        Oracle Linux (and Oracle Enterprise Linux)
-    "scientific"    Scientific Linux
-    "slackware"     Slackware
-    "xenserver"     XenServer
-    "openbsd"       OpenBSD
-    "netbsd"        NetBSD
-    "freebsd"       FreeBSD
-    ==============  =========================================
-
-    If you have a need to get distros for reliable IDs added into this set,
-    or if you find that the :func:`distro.id` function returns a different
-    distro ID for one of the listed distros, please create an issue in the
-    `distro issue tracker`_.
-
-    **Lookup hierarchy and transformations:**
-
-    First, the ID is obtained from the following sources, in the specified
-    order. The first available and non-empty value is used:
-
-    * the value of the "ID" attribute of the os-release file,
-
-    * the value of the "Distributor ID" attribute returned by the lsb_release
-      command,
-
-    * the first part of the file name of the distro release file,
-
-    The ID value determined in this way then passes through the following
-    transformations before it is returned by this method:
-
-    * it is translated to lower case,
-
-    * blanks (which should not be there anyway) are translated to underscores,
-
-    * a normalization of the ID is performed, based upon
-      `normalization tables`_. The purpose of this normalization is to ensure
-      that the ID is as reliable as possible, even across incompatible changes
-      in the OS distributions. A common reason for an incompatible change is
-      the addition of an os-release file, or the addition of the lsb_release
-      command, with ID values that differ from what was previously determined
-      from the distro release file name.
-    """
-    return _distro.id()
-
-
-def name(pretty=False):
-    """
-    Return the name of the current OS distribution, as a human-readable
-    string.
-
-    If *pretty* is false, the name is returned without version or codename.
-    (e.g. "CentOS Linux")
-
-    If *pretty* is true, the version and codename are appended.
-    (e.g. "CentOS Linux 7.1.1503 (Core)")
-
-    **Lookup hierarchy:**
-
-    The name is obtained from the following sources, in the specified order.
-    The first available and non-empty value is used:
-
-    * If *pretty* is false:
-
-      - the value of the "NAME" attribute of the os-release file,
-
-      - the value of the "Distributor ID" attribute returned by the lsb_release
-        command,
-
-      - the value of the "" field of the distro release file.
-
-    * If *pretty* is true:
-
-      - the value of the "PRETTY_NAME" attribute of the os-release file,
-
-      - the value of the "Description" attribute returned by the lsb_release
-        command,
-
-      - the value of the "" field of the distro release file, appended
-        with the value of the pretty version ("" and ""
-        fields) of the distro release file, if available.
-    """
-    return _distro.name(pretty)
-
-
-def version(pretty=False, best=False):
-    """
-    Return the version of the current OS distribution, as a human-readable
-    string.
-
-    If *pretty* is false, the version is returned without codename (e.g.
-    "7.0").
-
-    If *pretty* is true, the codename in parentheses is appended, if the
-    codename is non-empty (e.g. "7.0 (Maipo)").
-
-    Some distributions provide version numbers with different precisions in
-    the different sources of distribution information. Examining the different
-    sources in a fixed priority order does not always yield the most precise
-    version (e.g. for Debian 8.2, or CentOS 7.1).
-
-    The *best* parameter can be used to control the approach for the returned
-    version:
-
-    If *best* is false, the first non-empty version number in priority order of
-    the examined sources is returned.
-
-    If *best* is true, the most precise version number out of all examined
-    sources is returned.
-
-    **Lookup hierarchy:**
-
-    In all cases, the version number is obtained from the following sources.
-    If *best* is false, this order represents the priority order:
-
-    * the value of the "VERSION_ID" attribute of the os-release file,
-    * the value of the "Release" attribute returned by the lsb_release
-      command,
-    * the version number parsed from the "" field of the first line
-      of the distro release file,
-    * the version number parsed from the "PRETTY_NAME" attribute of the
-      os-release file, if it follows the format of the distro release files.
-    * the version number parsed from the "Description" attribute returned by
-      the lsb_release command, if it follows the format of the distro release
-      files.
-    """
-    return _distro.version(pretty, best)
-
-
-def version_parts(best=False):
-    """
-    Return the version of the current OS distribution as a tuple
-    ``(major, minor, build_number)`` with items as follows:
-
-    * ``major``:  The result of :func:`distro.major_version`.
-
-    * ``minor``:  The result of :func:`distro.minor_version`.
-
-    * ``build_number``:  The result of :func:`distro.build_number`.
-
-    For a description of the *best* parameter, see the :func:`distro.version`
-    method.
-    """
-    return _distro.version_parts(best)
-
-
-def major_version(best=False):
-    """
-    Return the major version of the current OS distribution, as a string,
-    if provided.
-    Otherwise, the empty string is returned. The major version is the first
-    part of the dot-separated version string.
-
-    For a description of the *best* parameter, see the :func:`distro.version`
-    method.
-    """
-    return _distro.major_version(best)
-
-
-def minor_version(best=False):
-    """
-    Return the minor version of the current OS distribution, as a string,
-    if provided.
-    Otherwise, the empty string is returned. The minor version is the second
-    part of the dot-separated version string.
-
-    For a description of the *best* parameter, see the :func:`distro.version`
-    method.
-    """
-    return _distro.minor_version(best)
-
-
-def build_number(best=False):
-    """
-    Return the build number of the current OS distribution, as a string,
-    if provided.
-    Otherwise, the empty string is returned. The build number is the third part
-    of the dot-separated version string.
-
-    For a description of the *best* parameter, see the :func:`distro.version`
-    method.
-    """
-    return _distro.build_number(best)
-
-
-def like():
-    """
-    Return a space-separated list of distro IDs of distributions that are
-    closely related to the current OS distribution with regard to packaging
-    and programming interfaces, for example distributions the current
-    distribution is a derivative of.
-
-    **Lookup hierarchy:**
-
-    This information item is only provided by the os-release file.
-    For details, see the description of the "ID_LIKE" attribute in the
-    `os-release man page
-    <https://www.freedesktop.org/software/systemd/man/os-release.html>`_.
-    """
-    return _distro.like()
-
-
-def codename():
-    """
-    Return the codename for the release of the current OS distribution,
-    as a string.
-
-    If the distribution does not have a codename, an empty string is returned.
-
-    Note that the returned codename is not always really a codename. For
-    example, openSUSE returns "x86_64". This function does not handle such
-    cases in any special way and just returns the string it finds, if any.
-
-    **Lookup hierarchy:**
-
-    * the codename within the "VERSION" attribute of the os-release file, if
-      provided,
-
-    * the value of the "Codename" attribute returned by the lsb_release
-      command,
-
-    * the value of the "" field of the distro release file.
-    """
-    return _distro.codename()
-
-
-def info(pretty=False, best=False):
-    """
-    Return certain machine-readable information items about the current OS
-    distribution in a dictionary, as shown in the following example:
-
-    .. sourcecode:: python
-
-        {
-            'id': 'rhel',
-            'version': '7.0',
-            'version_parts': {
-                'major': '7',
-                'minor': '0',
-                'build_number': ''
-            },
-            'like': 'fedora',
-            'codename': 'Maipo'
-        }
-
-    The dictionary structure and keys are always the same, regardless of which
-    information items are available in the underlying data sources. The values
-    for the various keys are as follows:
-
-    * ``id``:  The result of :func:`distro.id`.
-
-    * ``version``:  The result of :func:`distro.version`.
-
-    * ``version_parts -> major``:  The result of :func:`distro.major_version`.
-
-    * ``version_parts -> minor``:  The result of :func:`distro.minor_version`.
-
-    * ``version_parts -> build_number``:  The result of
-      :func:`distro.build_number`.
-
-    * ``like``:  The result of :func:`distro.like`.
-
-    * ``codename``:  The result of :func:`distro.codename`.
-
-    For a description of the *pretty* and *best* parameters, see the
-    :func:`distro.version` method.
-    """
-    return _distro.info(pretty, best)
-
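
A short sketch of consuming the dictionary returned by info(); the keys are
always present even when their values are empty (again assuming the
standalone ``distro`` package):

import distro

data = distro.info(pretty=True, best=True)
print(data['id'], data['version'], data['version_parts']['major'])
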
-
-def os_release_info():
-    """
-    Return a dictionary containing key-value pairs for the information items
-    from the os-release file data source of the current OS distribution.
-
-    See `os-release file`_ for details about these information items.
-    """
-    return _distro.os_release_info()
-
-
-def lsb_release_info():
-    """
-    Return a dictionary containing key-value pairs for the information items
-    from the lsb_release command data source of the current OS distribution.
-
-    See `lsb_release command output`_ for details about these information
-    items.
-    """
-    return _distro.lsb_release_info()
-
-
-def distro_release_info():
-    """
-    Return a dictionary containing key-value pairs for the information items
-    from the distro release file data source of the current OS distribution.
-
-    See `distro release file`_ for details about these information items.
-    """
-    return _distro.distro_release_info()
-
-
-def uname_info():
-    """
-    Return a dictionary containing key-value pairs for the information items
-    from the uname command data source of the current OS distribution.
-    """
-    return _distro.uname_info()
-
-
-def os_release_attr(attribute):
-    """
-    Return a single named information item from the os-release file data source
-    of the current OS distribution.
-
-    Parameters:
-
-    * ``attribute`` (string): Key of the information item.
-
-    Returns:
-
-    * (string): Value of the information item, if the item exists.
-      The empty string, if the item does not exist.
-
-    See `os-release file`_ for details about these information items.
-    """
-    return _distro.os_release_attr(attribute)
-
-
-def lsb_release_attr(attribute):
-    """
-    Return a single named information item from the lsb_release command output
-    data source of the current OS distribution.
-
-    Parameters:
-
-    * ``attribute`` (string): Key of the information item.
-
-    Returns:
-
-    * (string): Value of the information item, if the item exists.
-      The empty string, if the item does not exist.
-
-    See `lsb_release command output`_ for details about these information
-    items.
-    """
-    return _distro.lsb_release_attr(attribute)
-
-
-def distro_release_attr(attribute):
-    """
-    Return a single named information item from the distro release file
-    data source of the current OS distribution.
-
-    Parameters:
-
-    * ``attribute`` (string): Key of the information item.
-
-    Returns:
-
-    * (string): Value of the information item, if the item exists.
-      The empty string, if the item does not exist.
-
-    See `distro release file`_ for details about these information items.
-    """
-    return _distro.distro_release_attr(attribute)
-
-
-def uname_attr(attribute):
-    """
-    Return a single named information item from the uname command output
-    data source of the current OS distribution.
-
-    Parameters:
-
-    * ``attribute`` (string): Key of the information item.
-
-    Returns:
-
-    * (string): Value of the information item, if the item exists.
-                The empty string, if the item does not exist.
-    """
-    return _distro.uname_attr(attribute)
-
-
-class cached_property(object):
-    """A version of @property which caches the value.  On access, it calls the
-    underlying function and sets the value in `__dict__` so future accesses
-    will not re-call the property.
-    """
-    def __init__(self, f):
-        self._fname = f.__name__
-        self._f = f
-
-    def __get__(self, obj, owner):
-        assert obj is not None, 'call {} on an instance'.format(self._fname)
-        ret = obj.__dict__[self._fname] = self._f(obj)
-        return ret
-
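
Because __get__ stores the computed value in the instance ``__dict__`` and the
descriptor defines no __set__, later attribute lookups bypass the descriptor
entirely. A tiny sketch (importing the helper defined above from the
standalone ``distro`` package):

from distro import cached_property

class Example(object):
    @cached_property
    def value(self):
        print('computing...')
        return 42

e = Example()
e.value   # prints 'computing...' once and returns 42
e.value   # served from e.__dict__, no recomputation
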
-
-class LinuxDistribution(object):
-    """
-    Provides information about an OS distribution.
-
-    This package creates a private module-global instance of this class with
-    default initialization arguments, that is used by the
-    `consolidated accessor functions`_ and `single source accessor functions`_.
-    By using default initialization arguments, that module-global instance
-    returns data about the current OS distribution (i.e. the distro this
-    package runs on).
-
-    Normally, it is not necessary to create additional instances of this class.
-    However, in situations where control is needed over the exact data sources
-    that are used, instances of this class can be created with a specific
-    distro release file, or a specific os-release file, or without invoking the
-    lsb_release command.
-    """
-
-    def __init__(self,
-                 include_lsb=True,
-                 os_release_file='',
-                 distro_release_file='',
-                 include_uname=True):
-        """
-        The initialization method of this class gathers information from the
-        available data sources, and stores that in private instance attributes.
-        Subsequent access to the information items uses these private instance
-        attributes, so that the data sources are read only once.
-
-        Parameters:
-
-        * ``include_lsb`` (bool): Controls whether the
-          `lsb_release command output`_ is included as a data source.
-
-          If the lsb_release command is not available in the program execution
-          path, the data source for the lsb_release command will be empty.
-
-        * ``os_release_file`` (string): The path name of the
-          `os-release file`_ that is to be used as a data source.
-
-          An empty string (the default) will cause the default path name to
-          be used (see `os-release file`_ for details).
-
-          If the specified or defaulted os-release file does not exist, the
-          data source for the os-release file will be empty.
-
-        * ``distro_release_file`` (string): The path name of the
-          `distro release file`_ that is to be used as a data source.
-
-          An empty string (the default) will cause a default search algorithm
-          to be used (see `distro release file`_ for details).
-
-          If the specified distro release file does not exist, or if no default
-          distro release file can be found, the data source for the distro
-          release file will be empty.
-
-        * ``include_uname`` (bool): Controls whether uname command output is
-          included as a data source. If the uname command is not available in
-          the program execution path, the data source for the uname command
-          will be empty.
-
-        Public instance attributes:
-
-        * ``os_release_file`` (string): The path name of the
-          `os-release file`_ that is actually used as a data source. The
-          empty string if no os-release file is used as a data source.
-
-        * ``distro_release_file`` (string): The path name of the
-          `distro release file`_ that is actually used as a data source. The
-          empty string if no distro release file is used as a data source.
-
-        * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter.
-          This controls whether the lsb information will be loaded.
-
-        * ``include_uname`` (bool): The result of the ``include_uname``
-          parameter. This controls whether the uname information will
-          be loaded.
-
-        Raises:
-
-        * :py:exc:`IOError`: Some I/O issue with an os-release file or distro
-          release file.
-
-        * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had
-          some issue (other than not being available in the program execution
-          path).
-
-        * :py:exc:`UnicodeError`: A data source has unexpected characters or
-          uses an unexpected encoding.
-        """
-        self.os_release_file = os_release_file or \
-            os.path.join(_UNIXCONFDIR, _OS_RELEASE_BASENAME)
-        self.distro_release_file = distro_release_file or ''  # updated later
-        self.include_lsb = include_lsb
-        self.include_uname = include_uname
-
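
A hedged example of creating a dedicated instance instead of relying on the
module-global one, pointing it at a specific os-release file and skipping the
lsb_release command; the path is an assumption:

from distro import LinuxDistribution

ld = LinuxDistribution(include_lsb=False,
                       os_release_file='/tmp/test/os-release')
print(ld.id(), ld.version(best=True))
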
-    def __repr__(self):
-        """Return repr of all info
-        """
-        return \
-            "LinuxDistribution(" \
-            "os_release_file={self.os_release_file!r}, " \
-            "distro_release_file={self.distro_release_file!r}, " \
-            "include_lsb={self.include_lsb!r}, " \
-            "include_uname={self.include_uname!r}, " \
-            "_os_release_info={self._os_release_info!r}, " \
-            "_lsb_release_info={self._lsb_release_info!r}, " \
-            "_distro_release_info={self._distro_release_info!r}, " \
-            "_uname_info={self._uname_info!r})".format(
-                self=self)
-
-    def linux_distribution(self, full_distribution_name=True):
-        """
-        Return information about the OS distribution that is compatible
-        with Python's :func:`platform.linux_distribution`, supporting a subset
-        of its parameters.
-
-        For details, see :func:`distro.linux_distribution`.
-        """
-        return (
-            self.name() if full_distribution_name else self.id(),
-            self.version(),
-            self.codename()
-        )
-
-    def id(self):
-        """Return the distro ID of the OS distribution, as a string.
-
-        For details, see :func:`distro.id`.
-        """
-        def normalize(distro_id, table):
-            distro_id = distro_id.lower().replace(' ', '_')
-            return table.get(distro_id, distro_id)
-
-        distro_id = self.os_release_attr('id')
-        if distro_id:
-            return normalize(distro_id, NORMALIZED_OS_ID)
-
-        distro_id = self.lsb_release_attr('distributor_id')
-        if distro_id:
-            return normalize(distro_id, NORMALIZED_LSB_ID)
-
-        distro_id = self.distro_release_attr('id')
-        if distro_id:
-            return normalize(distro_id, NORMALIZED_DISTRO_ID)
-
-        distro_id = self.uname_attr('id')
-        if distro_id:
-            return normalize(distro_id, NORMALIZED_DISTRO_ID)
-
-        return ''
-
-    def name(self, pretty=False):
-        """
-        Return the name of the OS distribution, as a string.
-
-        For details, see :func:`distro.name`.
-        """
-        name = self.os_release_attr('name') \
-            or self.lsb_release_attr('distributor_id') \
-            or self.distro_release_attr('name') \
-            or self.uname_attr('name')
-        if pretty:
-            name = self.os_release_attr('pretty_name') \
-                or self.lsb_release_attr('description')
-            if not name:
-                name = self.distro_release_attr('name') \
-                       or self.uname_attr('name')
-                version = self.version(pretty=True)
-                if version:
-                    name = name + ' ' + version
-        return name or ''
-
-    def version(self, pretty=False, best=False):
-        """
-        Return the version of the OS distribution, as a string.
-
-        For details, see :func:`distro.version`.
-        """
-        versions = [
-            self.os_release_attr('version_id'),
-            self.lsb_release_attr('release'),
-            self.distro_release_attr('version_id'),
-            self._parse_distro_release_content(
-                self.os_release_attr('pretty_name')).get('version_id', ''),
-            self._parse_distro_release_content(
-                self.lsb_release_attr('description')).get('version_id', ''),
-            self.uname_attr('release')
-        ]
-        version = ''
-        if best:
-            # This algorithm uses the last version in priority order that has
-            # the best precision. If the versions are not in conflict, that
-            # does not matter; otherwise, using the last one instead of the
-            # first one might be considered a surprise.
-            for v in versions:
-                if v.count(".") > version.count(".") or version == '':
-                    version = v
-        else:
-            for v in versions:
-                if v != '':
-                    version = v
-                    break
-        if pretty and version and self.codename():
-            version = u'{0} ({1})'.format(version, self.codename())
-        return version
-
-    def version_parts(self, best=False):
-        """
-        Return the version of the OS distribution, as a tuple of version
-        numbers.
-
-        For details, see :func:`distro.version_parts`.
-        """
-        version_str = self.version(best=best)
-        if version_str:
-            version_regex = re.compile(r'(\d+)\.?(\d+)?\.?(\d+)?')
-            matches = version_regex.match(version_str)
-            if matches:
-                major, minor, build_number = matches.groups()
-                return major, minor or '', build_number or ''
-        return '', '', ''
-
-    def major_version(self, best=False):
-        """
-        Return the major version number of the current distribution.
-
-        For details, see :func:`distro.major_version`.
-        """
-        return self.version_parts(best)[0]
-
-    def minor_version(self, best=False):
-        """
-        Return the minor version number of the current distribution.
-
-        For details, see :func:`distro.minor_version`.
-        """
-        return self.version_parts(best)[1]
-
-    def build_number(self, best=False):
-        """
-        Return the build number of the current distribution.
-
-        For details, see :func:`distro.build_number`.
-        """
-        return self.version_parts(best)[2]
-
-    def like(self):
-        """
-        Return the IDs of distributions that are like the OS distribution.
-
-        For details, see :func:`distro.like`.
-        """
-        return self.os_release_attr('id_like') or ''
-
-    def codename(self):
-        """
-        Return the codename of the OS distribution.
-
-        For details, see :func:`distro.codename`.
-        """
-        try:
-            # Handle os_release specially since distros might purposefully set
-            # this to empty string to have no codename
-            return self._os_release_info['codename']
-        except KeyError:
-            return self.lsb_release_attr('codename') \
-                or self.distro_release_attr('codename') \
-                or ''
-
-    def info(self, pretty=False, best=False):
-        """
-        Return certain machine-readable information about the OS
-        distribution.
-
-        For details, see :func:`distro.info`.
-        """
-        return dict(
-            id=self.id(),
-            version=self.version(pretty, best),
-            version_parts=dict(
-                major=self.major_version(best),
-                minor=self.minor_version(best),
-                build_number=self.build_number(best)
-            ),
-            like=self.like(),
-            codename=self.codename(),
-        )
-
-    def os_release_info(self):
-        """
-        Return a dictionary containing key-value pairs for the information
-        items from the os-release file data source of the OS distribution.
-
-        For details, see :func:`distro.os_release_info`.
-        """
-        return self._os_release_info
-
-    def lsb_release_info(self):
-        """
-        Return a dictionary containing key-value pairs for the information
-        items from the lsb_release command data source of the OS
-        distribution.
-
-        For details, see :func:`distro.lsb_release_info`.
-        """
-        return self._lsb_release_info
-
-    def distro_release_info(self):
-        """
-        Return a dictionary containing key-value pairs for the information
-        items from the distro release file data source of the OS
-        distribution.
-
-        For details, see :func:`distro.distro_release_info`.
-        """
-        return self._distro_release_info
-
-    def uname_info(self):
-        """
-        Return a dictionary containing key-value pairs for the information
-        items from the uname command data source of the OS distribution.
-
-        For details, see :func:`distro.uname_info`.
-        """
-        return self._uname_info
-
-    def os_release_attr(self, attribute):
-        """
-        Return a single named information item from the os-release file data
-        source of the OS distribution.
-
-        For details, see :func:`distro.os_release_attr`.
-        """
-        return self._os_release_info.get(attribute, '')
-
-    def lsb_release_attr(self, attribute):
-        """
-        Return a single named information item from the lsb_release command
-        output data source of the OS distribution.
-
-        For details, see :func:`distro.lsb_release_attr`.
-        """
-        return self._lsb_release_info.get(attribute, '')
-
-    def distro_release_attr(self, attribute):
-        """
-        Return a single named information item from the distro release file
-        data source of the OS distribution.
-
-        For details, see :func:`distro.distro_release_attr`.
-        """
-        return self._distro_release_info.get(attribute, '')
-
-    def uname_attr(self, attribute):
-        """
-        Return a single named information item from the uname command
-        output data source of the OS distribution.
-
-        For details, see :func:`distro.uname_attr`.
-        """
-        return self._uname_info.get(attribute, '')
-
-    @cached_property
-    def _os_release_info(self):
-        """
-        Get the information items from the specified os-release file.
-
-        Returns:
-            A dictionary containing all information items.
-        """
-        if os.path.isfile(self.os_release_file):
-            with open(self.os_release_file) as release_file:
-                return self._parse_os_release_content(release_file)
-        return {}
-
-    @staticmethod
-    def _parse_os_release_content(lines):
-        """
-        Parse the lines of an os-release file.
-
-        Parameters:
-
-        * lines: Iterable through the lines in the os-release file.
-                 Each line must be a unicode string or a UTF-8 encoded byte
-                 string.
-
-        Returns:
-            A dictionary containing all information items.
-        """
-        props = {}
-        lexer = shlex.shlex(lines, posix=True)
-        lexer.whitespace_split = True
-
-        # The shlex module defines its `wordchars` variable using literals,
-        # making it dependent on the encoding of the Python source file.
-        # In Python 2.6 and 2.7, the shlex source file is encoded in
-        # 'iso-8859-1', and the `wordchars` variable is defined as a byte
-        # string. This causes a UnicodeDecodeError to be raised when the
-        # parsed content is a unicode object. The following fix resolves that
-        # (... but it should be fixed in shlex...):
-        if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes):
-            lexer.wordchars = lexer.wordchars.decode('iso-8859-1')
-
-        tokens = list(lexer)
-        for token in tokens:
-            # At this point, all shell-like parsing has been done (i.e.
-            # comments processed, quotes and backslash escape sequences
-            # processed, multi-line values assembled, trailing newlines
-            # stripped, etc.), so the tokens are now either:
-            # * variable assignments: var=value
-            # * commands or their arguments (not allowed in os-release)
-            if '=' in token:
-                k, v = token.split('=', 1)
-                if isinstance(v, bytes):
-                    v = v.decode('utf-8')
-                props[k.lower()] = v
-            else:
-                # Ignore any tokens that are not variable assignments
-                pass
-
-        if 'version_codename' in props:
-            # os-release added a version_codename field.  Use that in
-            # preference to anything else. Note that some distros purposefully
-            # do not have code names.  They should be setting
-            # version_codename=""
-            props['codename'] = props['version_codename']
-        elif 'ubuntu_codename' in props:
-            # Same as above but a non-standard field name used on older Ubuntus
-            props['codename'] = props['ubuntu_codename']
-        elif 'version' in props:
-            # If there is no version_codename, parse it from the version
-            codename = re.search(r'(\(\D+\))|,(\s+)?\D+', props['version'])
-            if codename:
-                codename = codename.group()
-                codename = codename.strip('()')
-                codename = codename.strip(',')
-                codename = codename.strip()
-                # The codename appears within parentheses.
-                props['codename'] = codename
-
-        return props
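
A minimal, self-contained sketch of the same shlex-based parsing on a made-up os-release snippet; posix mode strips the quotes and the default '#' commenter drops comment lines, leaving plain var=value tokens::

    import shlex

    sample = 'NAME="Arch Linux"\nID=arch\n# a comment\nVERSION_CODENAME=""\n'
    lexer = shlex.shlex(sample, posix=True)
    lexer.whitespace_split = True
    props = {}
    for token in lexer:
        if '=' in token:
            key, value = token.split('=', 1)
            props[key.lower()] = value
    # props == {'name': 'Arch Linux', 'id': 'arch', 'version_codename': ''}
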
-
-    @cached_property
-    def _lsb_release_info(self):
-        """
-        Get the information items from the lsb_release command output.
-
-        Returns:
-            A dictionary containing all information items.
-        """
-        if not self.include_lsb:
-            return {}
-        with open(os.devnull, 'w') as devnull:
-            try:
-                cmd = ('lsb_release', '-a')
-                stdout = subprocess.check_output(cmd, stderr=devnull)
-            except OSError:  # Command not found
-                return {}
-        content = stdout.decode(sys.getfilesystemencoding()).splitlines()
-        return self._parse_lsb_release_content(content)
-
-    @staticmethod
-    def _parse_lsb_release_content(lines):
-        """
-        Parse the output of the lsb_release command.
-
-        Parameters:
-
-        * lines: Iterable through the lines of the lsb_release output.
-                 Each line must be a unicode string or a UTF-8 encoded byte
-                 string.
-
-        Returns:
-            A dictionary containing all information items.
-        """
-        props = {}
-        for line in lines:
-            kv = line.strip('\n').split(':', 1)
-            if len(kv) != 2:
-                # Ignore lines without colon.
-                continue
-            k, v = kv
-            props.update({k.replace(' ', '_').lower(): v.strip()})
-        return props
-
-    @cached_property
-    def _uname_info(self):
-        with open(os.devnull, 'w') as devnull:
-            try:
-                cmd = ('uname', '-rs')
-                stdout = subprocess.check_output(cmd, stderr=devnull)
-            except OSError:
-                return {}
-        content = stdout.decode(sys.getfilesystemencoding()).splitlines()
-        return self._parse_uname_content(content)
-
-    @staticmethod
-    def _parse_uname_content(lines):
-        props = {}
-        match = re.search(r'^([^\s]+)\s+([\d\.]+)', lines[0].strip())
-        if match:
-            name, version = match.groups()
-
-            # This is to prevent the Linux kernel version from
-            # appearing as the 'best' version on otherwise
-            # identifiable distributions.
-            if name == 'Linux':
-                return {}
-            props['id'] = name.lower()
-            props['name'] = name
-            props['release'] = version
-        return props
-
-    @cached_property
-    def _distro_release_info(self):
-        """
-        Get the information items from the specified distro release file.
-
-        Returns:
-            A dictionary containing all information items.
-        """
-        if self.distro_release_file:
-            # If it was specified, we use it and parse what we can, even if
-            # its file name or content does not match the expected pattern.
-            distro_info = self._parse_distro_release_file(
-                self.distro_release_file)
-            basename = os.path.basename(self.distro_release_file)
-            # The file name pattern for user-specified distro release files
-            # is somewhat more tolerant (compared to when searching for the
-            # file), because we want to use what was specified as best as
-            # possible.
-            match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
-            if 'name' in distro_info \
-               and 'cloudlinux' in distro_info['name'].lower():
-                distro_info['id'] = 'cloudlinux'
-            elif match:
-                distro_info['id'] = match.group(1)
-            return distro_info
-        else:
-            try:
-                basenames = os.listdir(_UNIXCONFDIR)
-                # We sort for repeatability in cases where there are multiple
-                # distro specific files; e.g. CentOS, Oracle, Enterprise all
-                # containing `redhat-release` on top of their own.
-                basenames.sort()
-            except OSError:
-                # This may occur when /etc is not readable but we can't be
-                # sure about the *-release files. Check common entries of
-                # /etc for information. If they turn out to not be there the
-                # error is handled in `_parse_distro_release_file()`.
-                basenames = ['SuSE-release',
-                             'arch-release',
-                             'base-release',
-                             'centos-release',
-                             'fedora-release',
-                             'gentoo-release',
-                             'mageia-release',
-                             'mandrake-release',
-                             'mandriva-release',
-                             'mandrivalinux-release',
-                             'manjaro-release',
-                             'oracle-release',
-                             'redhat-release',
-                             'sl-release',
-                             'slackware-version']
-            for basename in basenames:
-                if basename in _DISTRO_RELEASE_IGNORE_BASENAMES:
-                    continue
-                match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
-                if match:
-                    filepath = os.path.join(_UNIXCONFDIR, basename)
-                    distro_info = self._parse_distro_release_file(filepath)
-                    if 'name' in distro_info:
-                        # The name is always present if the pattern matches
-                        self.distro_release_file = filepath
-                        distro_info['id'] = match.group(1)
-                        if 'cloudlinux' in distro_info['name'].lower():
-                            distro_info['id'] = 'cloudlinux'
-                        return distro_info
-            return {}
-
-    def _parse_distro_release_file(self, filepath):
-        """
-        Parse a distro release file.
-
-        Parameters:
-
-        * filepath: Path name of the distro release file.
-
-        Returns:
-            A dictionary containing all information items.
-        """
-        try:
-            with open(filepath) as fp:
-                # Only parse the first line. For instance, on SLES there
-                # are multiple lines. We don't want them...
-                return self._parse_distro_release_content(fp.readline())
-        except (OSError, IOError):
-            # Ignore not being able to read a specific, seemingly version
-            # related file.
-            # See https://github.com/nir0s/distro/issues/162
-            return {}
-
-    @staticmethod
-    def _parse_distro_release_content(line):
-        """
-        Parse a line from a distro release file.
-
-        Parameters:
-        * line: Line from the distro release file. Must be a unicode string
-                or a UTF-8 encoded byte string.
-
-        Returns:
-            A dictionary containing all information items.
-        """
-        if isinstance(line, bytes):
-            line = line.decode('utf-8')
-        matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(
-            line.strip()[::-1])
-        distro_info = {}
-        if matches:
-            # regexp ensures non-None
-            distro_info['name'] = matches.group(3)[::-1]
-            if matches.group(2):
-                distro_info['version_id'] = matches.group(2)[::-1]
-            if matches.group(1):
-                distro_info['codename'] = matches.group(1)[::-1]
-        elif line:
-            distro_info['name'] = line.strip()
-        return distro_info
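
To illustrate why the line is reversed, here is a sketch using a simplified stand-in pattern (not the library's actual _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN): anchoring on the trailing '(codename)' and version number is much easier once the string is flipped, because the distribution name can contain any number of words::

    import re

    line = 'CentOS Linux release 7.1.1503 (Core)'   # hypothetical input
    reversed_pat = re.compile(
        r'\)(?P<codename>[^(]*)\( +(?P<version>[\d.]+) +esaeler +(?P<name>.+)')
    m = reversed_pat.match(line[::-1])
    info = {
        'name': m.group('name')[::-1],            # 'CentOS Linux'
        'version_id': m.group('version')[::-1],   # '7.1.1503'
        'codename': m.group('codename')[::-1],    # 'Core'
    }
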
-
-
-_distro = LinuxDistribution()
-
-
-def main():
-    logger = logging.getLogger(__name__)
-    logger.setLevel(logging.DEBUG)
-    logger.addHandler(logging.StreamHandler(sys.stdout))
-
-    parser = argparse.ArgumentParser(description="OS distro info tool")
-    parser.add_argument(
-        '--json',
-        '-j',
-        help="Output in machine readable format",
-        action="store_true")
-    args = parser.parse_args()
-
-    if args.json:
-        logger.info(json.dumps(info(), indent=4, sort_keys=True))
-    else:
-        logger.info('Name: %s', name(pretty=True))
-        distribution_version = version(pretty=True)
-        logger.info('Version: %s', distribution_version)
-        distribution_codename = codename()
-        logger.info('Codename: %s', distribution_codename)
-
-
-if __name__ == '__main__':
-    main()
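
For context, a short usage sketch of the module-level API built on this singleton; here the module is vendored under pip._vendor, the standalone "distro" package exposes the same names, and the example outputs are only illustrative::

    from pip._vendor import distro

    print(distro.id())                # e.g. 'ubuntu'
    print(distro.version(best=True))  # most precise version string available
    print(distro.name(pretty=True))   # e.g. 'Ubuntu 18.04.3 LTS'
    print(distro.info()['codename'])  # e.g. 'bionic'
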
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/__init__.py
deleted file mode 100644
index 0491234..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/__init__.py
+++ /dev/null
@@ -1,35 +0,0 @@
-"""
-HTML parsing library based on the `WHATWG HTML specification
-<https://whatwg.org/html>`_. The parser is designed to be compatible with
-existing HTML found in the wild and implements well-defined error recovery that
-is largely compatible with modern desktop web browsers.
-
-Example usage::
-
-    from pip._vendor import html5lib
-    with open("my_document.html", "rb") as f:
-        tree = html5lib.parse(f)
-
-For convenience, this module re-exports the following names:
-
-* :func:`~.html5parser.parse`
-* :func:`~.html5parser.parseFragment`
-* :class:`~.html5parser.HTMLParser`
-* :func:`~.treebuilders.getTreeBuilder`
-* :func:`~.treewalkers.getTreeWalker`
-* :func:`~.serializer.serialize`
-"""
-
-from __future__ import absolute_import, division, unicode_literals
-
-from .html5parser import HTMLParser, parse, parseFragment
-from .treebuilders import getTreeBuilder
-from .treewalkers import getTreeWalker
-from .serializer import serialize
-
-__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
-           "getTreeWalker", "serialize"]
-
-# this has to be at the top level, see how setup.py parses this
-#: Distribution version number.
-__version__ = "1.0.1"
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_ihatexml.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_ihatexml.py
deleted file mode 100644
index 4c77717..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_ihatexml.py
+++ /dev/null
@@ -1,288 +0,0 @@
-from __future__ import absolute_import, division, unicode_literals
-
-import re
-import warnings
-
-from .constants import DataLossWarning
-
-baseChar = """
-[#x0041-#x005A] | [#x0061-#x007A] | [#x00C0-#x00D6] | [#x00D8-#x00F6] |
-[#x00F8-#x00FF] | [#x0100-#x0131] | [#x0134-#x013E] | [#x0141-#x0148] |
-[#x014A-#x017E] | [#x0180-#x01C3] | [#x01CD-#x01F0] | [#x01F4-#x01F5] |
-[#x01FA-#x0217] | [#x0250-#x02A8] | [#x02BB-#x02C1] | #x0386 |
-[#x0388-#x038A] | #x038C | [#x038E-#x03A1] | [#x03A3-#x03CE] |
-[#x03D0-#x03D6] | #x03DA | #x03DC | #x03DE | #x03E0 | [#x03E2-#x03F3] |
-[#x0401-#x040C] | [#x040E-#x044F] | [#x0451-#x045C] | [#x045E-#x0481] |
-[#x0490-#x04C4] | [#x04C7-#x04C8] | [#x04CB-#x04CC] | [#x04D0-#x04EB] |
-[#x04EE-#x04F5] | [#x04F8-#x04F9] | [#x0531-#x0556] | #x0559 |
-[#x0561-#x0586] | [#x05D0-#x05EA] | [#x05F0-#x05F2] | [#x0621-#x063A] |
-[#x0641-#x064A] | [#x0671-#x06B7] | [#x06BA-#x06BE] | [#x06C0-#x06CE] |
-[#x06D0-#x06D3] | #x06D5 | [#x06E5-#x06E6] | [#x0905-#x0939] | #x093D |
-[#x0958-#x0961] | [#x0985-#x098C] | [#x098F-#x0990] | [#x0993-#x09A8] |
-[#x09AA-#x09B0] | #x09B2 | [#x09B6-#x09B9] | [#x09DC-#x09DD] |
-[#x09DF-#x09E1] | [#x09F0-#x09F1] | [#x0A05-#x0A0A] | [#x0A0F-#x0A10] |
-[#x0A13-#x0A28] | [#x0A2A-#x0A30] | [#x0A32-#x0A33] | [#x0A35-#x0A36] |
-[#x0A38-#x0A39] | [#x0A59-#x0A5C] | #x0A5E | [#x0A72-#x0A74] |
-[#x0A85-#x0A8B] | #x0A8D | [#x0A8F-#x0A91] | [#x0A93-#x0AA8] |
-[#x0AAA-#x0AB0] | [#x0AB2-#x0AB3] | [#x0AB5-#x0AB9] | #x0ABD | #x0AE0 |
-[#x0B05-#x0B0C] | [#x0B0F-#x0B10] | [#x0B13-#x0B28] | [#x0B2A-#x0B30] |
-[#x0B32-#x0B33] | [#x0B36-#x0B39] | #x0B3D | [#x0B5C-#x0B5D] |
-[#x0B5F-#x0B61] | [#x0B85-#x0B8A] | [#x0B8E-#x0B90] | [#x0B92-#x0B95] |
-[#x0B99-#x0B9A] | #x0B9C | [#x0B9E-#x0B9F] | [#x0BA3-#x0BA4] |
-[#x0BA8-#x0BAA] | [#x0BAE-#x0BB5] | [#x0BB7-#x0BB9] | [#x0C05-#x0C0C] |
-[#x0C0E-#x0C10] | [#x0C12-#x0C28] | [#x0C2A-#x0C33] | [#x0C35-#x0C39] |
-[#x0C60-#x0C61] | [#x0C85-#x0C8C] | [#x0C8E-#x0C90] | [#x0C92-#x0CA8] |
-[#x0CAA-#x0CB3] | [#x0CB5-#x0CB9] | #x0CDE | [#x0CE0-#x0CE1] |
-[#x0D05-#x0D0C] | [#x0D0E-#x0D10] | [#x0D12-#x0D28] | [#x0D2A-#x0D39] |
-[#x0D60-#x0D61] | [#x0E01-#x0E2E] | #x0E30 | [#x0E32-#x0E33] |
-[#x0E40-#x0E45] | [#x0E81-#x0E82] | #x0E84 | [#x0E87-#x0E88] | #x0E8A |
-#x0E8D | [#x0E94-#x0E97] | [#x0E99-#x0E9F] | [#x0EA1-#x0EA3] | #x0EA5 |
-#x0EA7 | [#x0EAA-#x0EAB] | [#x0EAD-#x0EAE] | #x0EB0 | [#x0EB2-#x0EB3] |
-#x0EBD | [#x0EC0-#x0EC4] | [#x0F40-#x0F47] | [#x0F49-#x0F69] |
-[#x10A0-#x10C5] | [#x10D0-#x10F6] | #x1100 | [#x1102-#x1103] |
-[#x1105-#x1107] | #x1109 | [#x110B-#x110C] | [#x110E-#x1112] | #x113C |
-#x113E | #x1140 | #x114C | #x114E | #x1150 | [#x1154-#x1155] | #x1159 |
-[#x115F-#x1161] | #x1163 | #x1165 | #x1167 | #x1169 | [#x116D-#x116E] |
-[#x1172-#x1173] | #x1175 | #x119E | #x11A8 | #x11AB | [#x11AE-#x11AF] |
-[#x11B7-#x11B8] | #x11BA | [#x11BC-#x11C2] | #x11EB | #x11F0 | #x11F9 |
-[#x1E00-#x1E9B] | [#x1EA0-#x1EF9] | [#x1F00-#x1F15] | [#x1F18-#x1F1D] |
-[#x1F20-#x1F45] | [#x1F48-#x1F4D] | [#x1F50-#x1F57] | #x1F59 | #x1F5B |
-#x1F5D | [#x1F5F-#x1F7D] | [#x1F80-#x1FB4] | [#x1FB6-#x1FBC] | #x1FBE |
-[#x1FC2-#x1FC4] | [#x1FC6-#x1FCC] | [#x1FD0-#x1FD3] | [#x1FD6-#x1FDB] |
-[#x1FE0-#x1FEC] | [#x1FF2-#x1FF4] | [#x1FF6-#x1FFC] | #x2126 |
-[#x212A-#x212B] | #x212E | [#x2180-#x2182] | [#x3041-#x3094] |
-[#x30A1-#x30FA] | [#x3105-#x312C] | [#xAC00-#xD7A3]"""
-
-ideographic = """[#x4E00-#x9FA5] | #x3007 | [#x3021-#x3029]"""
-
-combiningCharacter = """
-[#x0300-#x0345] | [#x0360-#x0361] | [#x0483-#x0486] | [#x0591-#x05A1] |
-[#x05A3-#x05B9] | [#x05BB-#x05BD] | #x05BF | [#x05C1-#x05C2] | #x05C4 |
-[#x064B-#x0652] | #x0670 | [#x06D6-#x06DC] | [#x06DD-#x06DF] |
-[#x06E0-#x06E4] | [#x06E7-#x06E8] | [#x06EA-#x06ED] | [#x0901-#x0903] |
-#x093C | [#x093E-#x094C] | #x094D | [#x0951-#x0954] | [#x0962-#x0963] |
-[#x0981-#x0983] | #x09BC | #x09BE | #x09BF | [#x09C0-#x09C4] |
-[#x09C7-#x09C8] | [#x09CB-#x09CD] | #x09D7 | [#x09E2-#x09E3] | #x0A02 |
-#x0A3C | #x0A3E | #x0A3F | [#x0A40-#x0A42] | [#x0A47-#x0A48] |
-[#x0A4B-#x0A4D] | [#x0A70-#x0A71] | [#x0A81-#x0A83] | #x0ABC |
-[#x0ABE-#x0AC5] | [#x0AC7-#x0AC9] | [#x0ACB-#x0ACD] | [#x0B01-#x0B03] |
-#x0B3C | [#x0B3E-#x0B43] | [#x0B47-#x0B48] | [#x0B4B-#x0B4D] |
-[#x0B56-#x0B57] | [#x0B82-#x0B83] | [#x0BBE-#x0BC2] | [#x0BC6-#x0BC8] |
-[#x0BCA-#x0BCD] | #x0BD7 | [#x0C01-#x0C03] | [#x0C3E-#x0C44] |
-[#x0C46-#x0C48] | [#x0C4A-#x0C4D] | [#x0C55-#x0C56] | [#x0C82-#x0C83] |
-[#x0CBE-#x0CC4] | [#x0CC6-#x0CC8] | [#x0CCA-#x0CCD] | [#x0CD5-#x0CD6] |
-[#x0D02-#x0D03] | [#x0D3E-#x0D43] | [#x0D46-#x0D48] | [#x0D4A-#x0D4D] |
-#x0D57 | #x0E31 | [#x0E34-#x0E3A] | [#x0E47-#x0E4E] | #x0EB1 |
-[#x0EB4-#x0EB9] | [#x0EBB-#x0EBC] | [#x0EC8-#x0ECD] | [#x0F18-#x0F19] |
-#x0F35 | #x0F37 | #x0F39 | #x0F3E | #x0F3F | [#x0F71-#x0F84] |
-[#x0F86-#x0F8B] | [#x0F90-#x0F95] | #x0F97 | [#x0F99-#x0FAD] |
-[#x0FB1-#x0FB7] | #x0FB9 | [#x20D0-#x20DC] | #x20E1 | [#x302A-#x302F] |
-#x3099 | #x309A"""
-
-digit = """
-[#x0030-#x0039] | [#x0660-#x0669] | [#x06F0-#x06F9] | [#x0966-#x096F] |
-[#x09E6-#x09EF] | [#x0A66-#x0A6F] | [#x0AE6-#x0AEF] | [#x0B66-#x0B6F] |
-[#x0BE7-#x0BEF] | [#x0C66-#x0C6F] | [#x0CE6-#x0CEF] | [#x0D66-#x0D6F] |
-[#x0E50-#x0E59] | [#x0ED0-#x0ED9] | [#x0F20-#x0F29]"""
-
-extender = """
-#x00B7 | #x02D0 | #x02D1 | #x0387 | #x0640 | #x0E46 | #x0EC6 | #x3005 |
-#[#x3031-#x3035] | [#x309D-#x309E] | [#x30FC-#x30FE]"""
-
-letter = " | ".join([baseChar, ideographic])
-
-# Without the
-name = " | ".join([letter, digit, ".", "-", "_", combiningCharacter,
-                   extender])
-nameFirst = " | ".join([letter, "_"])
-
-reChar = re.compile(r"#x([\d|A-F]{4,4})")
-reCharRange = re.compile(r"\[#x([\d|A-F]{4,4})-#x([\d|A-F]{4,4})\]")
-
-
-def charStringToList(chars):
-    charRanges = [item.strip() for item in chars.split(" | ")]
-    rv = []
-    for item in charRanges:
-        foundMatch = False
-        for regexp in (reChar, reCharRange):
-            match = regexp.match(item)
-            if match is not None:
-                rv.append([hexToInt(item) for item in match.groups()])
-                if len(rv[-1]) == 1:
-                    rv[-1] = rv[-1] * 2
-                foundMatch = True
-                break
-        if not foundMatch:
-            assert len(item) == 1
-
-            rv.append([ord(item)] * 2)
-    rv = normaliseCharList(rv)
-    return rv
-
-
-def normaliseCharList(charList):
-    charList = sorted(charList)
-    for item in charList:
-        assert item[1] >= item[0]
-    rv = []
-    i = 0
-    while i < len(charList):
-        j = 1
-        rv.append(charList[i])
-        while i + j < len(charList) and charList[i + j][0] <= rv[-1][1] + 1:
-            rv[-1][1] = charList[i + j][1]
-            j += 1
-        i += j
-    return rv
-
-# We don't really support characters above the BMP :(
-max_unicode = int("FFFF", 16)
-
-
-def missingRanges(charList):
-    rv = []
-    if charList[0][0] != 0:
-        rv.append([0, charList[0][0] - 1])
-    for i, item in enumerate(charList[:-1]):
-        rv.append([item[1] + 1, charList[i + 1][0] - 1])
-    if charList[-1][1] != max_unicode:
-        rv.append([charList[-1][1] + 1, max_unicode])
-    return rv
-
-
-def listToRegexpStr(charList):
-    rv = []
-    for item in charList:
-        if item[0] == item[1]:
-            rv.append(escapeRegexp(chr(item[0])))
-        else:
-            rv.append(escapeRegexp(chr(item[0])) + "-" +
-                      escapeRegexp(chr(item[1])))
-    return "[%s]" % "".join(rv)
-
-
-def hexToInt(hex_str):
-    return int(hex_str, 16)
-
-
-def escapeRegexp(string):
-    specialCharacters = (".", "^", "$", "*", "+", "?", "{", "}",
-                         "[", "]", "|", "(", ")", "-")
-    for char in specialCharacters:
-        string = string.replace(char, "\\" + char)
-
-    return string
-
-# output from the above
-nonXmlNameBMPRegexp = re.compile('[\x00-,/:-@\\[-\\^`\\{-\xb6\xb8-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u02cf\u02d2-\u02ff\u0346-\u035f\u0362-\u0385\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482\u0487-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u0590\u05a2\u05ba\u05be\u05c0\u05c3\u05c5-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u063f\u0653-\u065f\u066a-\u066f\u06b8-\u06b9\u06bf\u06cf\u06d4\u06e9\u06ee-\u06ef\u06fa-\u0900\u0904\u093a-\u093b\u094e-\u0950\u0955-\u0957\u0964-\u0965\u0970-\u0980\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09bd\u09c5-\u09c6\u09c9-\u09ca\u09ce-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09f2-\u0a01\u0a03-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a58\u0a5d\u0a5f-\u0a65\u0a75-\u0a80\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0adf\u0ae1-\u0ae5\u0af0-\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3b\u0b44-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b62-\u0b65\u0b70-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bd6\u0bd8-\u0be6\u0bf0-\u0c00\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c3d\u0c45\u0c49\u0c4e-\u0c54\u0c57-\u0c5f\u0c62-\u0c65\u0c70-\u0c81\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbd\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce2-\u0ce5\u0cf0-\u0d01\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d3d\u0d44-\u0d45\u0d49\u0d4e-\u0d56\u0d58-\u0d5f\u0d62-\u0d65\u0d70-\u0e00\u0e2f\u0e3b-\u0e3f\u0e4f\u0e5a-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0f17\u0f1a-\u0f1f\u0f2a-\u0f34\u0f36\u0f38\u0f3a-\u0f3d\u0f48\u0f6a-\u0f70\u0f85\u0f8c-\u0f8f\u0f96\u0f98\u0fae-\u0fb0\u0fb8\u0fba-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u20cf\u20dd-\u20e0\u20e2-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3004\u3006\u3008-\u3020\u3030\u3036-\u3040\u3095-\u3098\u309b-\u309c\u309f-\u30a0\u30fb\u30ff-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]')  # noqa
-
-nonXmlNameFirstBMPRegexp = re.compile('[\x00-@\\[-\\^`\\{-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u0385\u0387\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u0640\u064b-\u0670\u06b8-\u06b9\u06bf\u06cf\u06d4\u06d6-\u06e4\u06e7-\u0904\u093a-\u093c\u093e-\u0957\u0962-\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09db\u09de\u09e2-\u09ef\u09f2-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a58\u0a5d\u0a5f-\u0a71\u0a75-\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abc\u0abe-\u0adf\u0ae1-\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3c\u0b3e-\u0b5b\u0b5e\u0b62-\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c5f\u0c62-\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cdd\u0cdf\u0ce2-\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d5f\u0d62-\u0e00\u0e2f\u0e31\u0e34-\u0e3f\u0e46-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eb1\u0eb4-\u0ebc\u0ebe-\u0ebf\u0ec5-\u0f3f\u0f48\u0f6a-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3006\u3008-\u3020\u302a-\u3040\u3095-\u30a0\u30fb-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]')  # noqa
-
-# Simpler things
-nonPubidCharRegexp = re.compile("[^\x20\x0D\x0Aa-zA-Z0-9\\-'()+,./:=?;!*#@$_%]")
-
-
-class InfosetFilter(object):
-    replacementRegexp = re.compile(r"U[\dA-F]{5,5}")
-
-    def __init__(self,
-                 dropXmlnsLocalName=False,
-                 dropXmlnsAttrNs=False,
-                 preventDoubleDashComments=False,
-                 preventDashAtCommentEnd=False,
-                 replaceFormFeedCharacters=True,
-                 preventSingleQuotePubid=False):
-
-        self.dropXmlnsLocalName = dropXmlnsLocalName
-        self.dropXmlnsAttrNs = dropXmlnsAttrNs
-
-        self.preventDoubleDashComments = preventDoubleDashComments
-        self.preventDashAtCommentEnd = preventDashAtCommentEnd
-
-        self.replaceFormFeedCharacters = replaceFormFeedCharacters
-
-        self.preventSingleQuotePubid = preventSingleQuotePubid
-
-        self.replaceCache = {}
-
-    def coerceAttribute(self, name, namespace=None):
-        if self.dropXmlnsLocalName and name.startswith("xmlns:"):
-            warnings.warn("Attributes cannot begin with xmlns", DataLossWarning)
-            return None
-        elif (self.dropXmlnsAttrNs and
-              namespace == "http://www.w3.org/2000/xmlns/"):
-            warnings.warn("Attributes cannot be in the xml namespace", DataLossWarning)
-            return None
-        else:
-            return self.toXmlName(name)
-
-    def coerceElement(self, name):
-        return self.toXmlName(name)
-
-    def coerceComment(self, data):
-        if self.preventDoubleDashComments:
-            while "--" in data:
-                warnings.warn("Comments cannot contain adjacent dashes", DataLossWarning)
-                data = data.replace("--", "- -")
-            if data.endswith("-"):
-                warnings.warn("Comments cannot end in a dash", DataLossWarning)
-                data += " "
-        return data
-
-    def coerceCharacters(self, data):
-        if self.replaceFormFeedCharacters:
-            for _ in range(data.count("\x0C")):
-                warnings.warn("Text cannot contain U+000C", DataLossWarning)
-            data = data.replace("\x0C", " ")
-        # Other non-xml characters
-        return data
-
-    def coercePubid(self, data):
-        dataOutput = data
-        for char in nonPubidCharRegexp.findall(data):
-            warnings.warn("Coercing non-XML pubid", DataLossWarning)
-            replacement = self.getReplacementCharacter(char)
-            dataOutput = dataOutput.replace(char, replacement)
-        if self.preventSingleQuotePubid and dataOutput.find("'") >= 0:
-            warnings.warn("Pubid cannot contain single quote", DataLossWarning)
-            dataOutput = dataOutput.replace("'", self.getReplacementCharacter("'"))
-        return dataOutput
-
-    def toXmlName(self, name):
-        nameFirst = name[0]
-        nameRest = name[1:]
-        m = nonXmlNameFirstBMPRegexp.match(nameFirst)
-        if m:
-            warnings.warn("Coercing non-XML name", DataLossWarning)
-            nameFirstOutput = self.getReplacementCharacter(nameFirst)
-        else:
-            nameFirstOutput = nameFirst
-
-        nameRestOutput = nameRest
-        replaceChars = set(nonXmlNameBMPRegexp.findall(nameRest))
-        for char in replaceChars:
-            warnings.warn("Coercing non-XML name", DataLossWarning)
-            replacement = self.getReplacementCharacter(char)
-            nameRestOutput = nameRestOutput.replace(char, replacement)
-        return nameFirstOutput + nameRestOutput
-
-    def getReplacementCharacter(self, char):
-        if char in self.replaceCache:
-            replacement = self.replaceCache[char]
-        else:
-            replacement = self.escapeChar(char)
-        return replacement
-
-    def fromXmlName(self, name):
-        for item in set(self.replacementRegexp.findall(name)):
-            name = name.replace(item, self.unescapeChar(item))
-        return name
-
-    def escapeChar(self, char):
-        replacement = "U%05X" % ord(char)
-        self.replaceCache[char] = replacement
-        return replacement
-
-    def unescapeChar(self, charcode):
-        return chr(int(charcode[1:], 16))
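
A minimal usage sketch of the filter (module path as vendored here): characters that are not legal in an XML name are replaced with reversible U+XXXXX escapes, and fromXmlName undoes them::

    from pip._vendor.html5lib._ihatexml import InfosetFilter

    f = InfosetFilter()
    coerced = f.coerceElement('1badname')  # a leading digit is not a valid XML name start
    print(coerced)                         # 'U00031badname' (a DataLossWarning is emitted)
    print(f.fromXmlName(coerced))          # '1badname' -- the escape round-trips
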
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_inputstream.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_inputstream.py
deleted file mode 100644
index a65e55f..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_inputstream.py
+++ /dev/null
@@ -1,923 +0,0 @@
-from __future__ import absolute_import, division, unicode_literals
-
-from pip._vendor.six import text_type, binary_type
-from pip._vendor.six.moves import http_client, urllib
-
-import codecs
-import re
-
-from pip._vendor import webencodings
-
-from .constants import EOF, spaceCharacters, asciiLetters, asciiUppercase
-from .constants import _ReparseException
-from . import _utils
-
-from io import StringIO
-
-try:
-    from io import BytesIO
-except ImportError:
-    BytesIO = StringIO
-
-# Non-unicode versions of constants for use in the pre-parser
-spaceCharactersBytes = frozenset([item.encode("ascii") for item in spaceCharacters])
-asciiLettersBytes = frozenset([item.encode("ascii") for item in asciiLetters])
-asciiUppercaseBytes = frozenset([item.encode("ascii") for item in asciiUppercase])
-spacesAngleBrackets = spaceCharactersBytes | frozenset([b">", b"<"])
-
-
-invalid_unicode_no_surrogate = "[\u0001-\u0008\u000B\u000E-\u001F\u007F-\u009F\uFDD0-\uFDEF\uFFFE\uFFFF\U0001FFFE\U0001FFFF\U0002FFFE\U0002FFFF\U0003FFFE\U0003FFFF\U0004FFFE\U0004FFFF\U0005FFFE\U0005FFFF\U0006FFFE\U0006FFFF\U0007FFFE\U0007FFFF\U0008FFFE\U0008FFFF\U0009FFFE\U0009FFFF\U000AFFFE\U000AFFFF\U000BFFFE\U000BFFFF\U000CFFFE\U000CFFFF\U000DFFFE\U000DFFFF\U000EFFFE\U000EFFFF\U000FFFFE\U000FFFFF\U0010FFFE\U0010FFFF]"  # noqa
-
-if _utils.supports_lone_surrogates:
-    # Use one extra step of indirection and create surrogates with
-    # eval. Not using this indirection would introduce an illegal
-    # unicode literal on platforms not supporting such lone
-    # surrogates.
-    assert invalid_unicode_no_surrogate[-1] == "]" and invalid_unicode_no_surrogate.count("]") == 1
-    invalid_unicode_re = re.compile(invalid_unicode_no_surrogate[:-1] +
-                                    eval('"\\uD800-\\uDFFF"') +  # pylint:disable=eval-used
-                                    "]")
-else:
-    invalid_unicode_re = re.compile(invalid_unicode_no_surrogate)
-
-non_bmp_invalid_codepoints = set([0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE,
-                                  0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF,
-                                  0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE,
-                                  0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF,
-                                  0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE,
-                                  0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF,
-                                  0x10FFFE, 0x10FFFF])
-
-ascii_punctuation_re = re.compile("[\u0009-\u000D\u0020-\u002F\u003A-\u0040\u005C\u005B-\u0060\u007B-\u007E]")
-
-# Cache for charsUntil()
-charsUntilRegEx = {}
-
-
-class BufferedStream(object):
-    """Buffering for streams that do not have buffering of their own
-
-    The buffer is implemented as a list of chunks on the assumption that
-    joining many strings will be slow since it is O(n**2)
-    """
-
-    def __init__(self, stream):
-        self.stream = stream
-        self.buffer = []
-        self.position = [-1, 0]  # chunk number, offset
-
-    def tell(self):
-        pos = 0
-        for chunk in self.buffer[:self.position[0]]:
-            pos += len(chunk)
-        pos += self.position[1]
-        return pos
-
-    def seek(self, pos):
-        assert pos <= self._bufferedBytes()
-        offset = pos
-        i = 0
-        while len(self.buffer[i]) < offset:
-            offset -= len(self.buffer[i])
-            i += 1
-        self.position = [i, offset]
-
-    def read(self, bytes):
-        if not self.buffer:
-            return self._readStream(bytes)
-        elif (self.position[0] == len(self.buffer) and
-              self.position[1] == len(self.buffer[-1])):
-            return self._readStream(bytes)
-        else:
-            return self._readFromBuffer(bytes)
-
-    def _bufferedBytes(self):
-        return sum([len(item) for item in self.buffer])
-
-    def _readStream(self, bytes):
-        data = self.stream.read(bytes)
-        self.buffer.append(data)
-        self.position[0] += 1
-        self.position[1] = len(data)
-        return data
-
-    def _readFromBuffer(self, bytes):
-        remainingBytes = bytes
-        rv = []
-        bufferIndex = self.position[0]
-        bufferOffset = self.position[1]
-        while bufferIndex < len(self.buffer) and remainingBytes != 0:
-            assert remainingBytes > 0
-            bufferedData = self.buffer[bufferIndex]
-
-            if remainingBytes <= len(bufferedData) - bufferOffset:
-                bytesToRead = remainingBytes
-                self.position = [bufferIndex, bufferOffset + bytesToRead]
-            else:
-                bytesToRead = len(bufferedData) - bufferOffset
-                self.position = [bufferIndex, len(bufferedData)]
-                bufferIndex += 1
-            rv.append(bufferedData[bufferOffset:bufferOffset + bytesToRead])
-            remainingBytes -= bytesToRead
-
-            bufferOffset = 0
-
-        if remainingBytes:
-            rv.append(self._readStream(remainingBytes))
-
-        return b"".join(rv)
-
-
-def HTMLInputStream(source, **kwargs):
-    # Work around Python bug #20007: read(0) closes the connection.
-    # http://bugs.python.org/issue20007
-    if (isinstance(source, http_client.HTTPResponse) or
-        # Also check for addinfourl wrapping HTTPResponse
-        (isinstance(source, urllib.response.addbase) and
-         isinstance(source.fp, http_client.HTTPResponse))):
-        isUnicode = False
-    elif hasattr(source, "read"):
-        isUnicode = isinstance(source.read(0), text_type)
-    else:
-        isUnicode = isinstance(source, text_type)
-
-    if isUnicode:
-        encodings = [x for x in kwargs if x.endswith("_encoding")]
-        if encodings:
-            raise TypeError("Cannot set an encoding with a unicode input, set %r" % encodings)
-
-        return HTMLUnicodeInputStream(source, **kwargs)
-    else:
-        return HTMLBinaryInputStream(source, **kwargs)
-
-
-class HTMLUnicodeInputStream(object):
-    """Provides a unicode stream of characters to the HTMLTokenizer.
-
-    This class takes care of character encoding and removing or replacing
-    incorrect byte-sequences and also provides column and line tracking.
-
-    """
-
-    _defaultChunkSize = 10240
-
-    def __init__(self, source):
-        """Initialises the HTMLInputStream.
-
-        HTMLInputStream(source, [encoding]) -> Normalized stream from source
-        for use by html5lib.
-
-        source can be either a file-object, local filename or a string.
-
-        The optional encoding parameter must be a string that indicates
-        the encoding.  If specified, that encoding will be used,
-        regardless of any BOM or later declaration (such as in a meta
-        element)
-
-        """
-
-        if not _utils.supports_lone_surrogates:
-            # Such platforms will have already checked for such
-            # surrogate errors, so no need to do this checking.
-            self.reportCharacterErrors = None
-        elif len("\U0010FFFF") == 1:
-            self.reportCharacterErrors = self.characterErrorsUCS4
-        else:
-            self.reportCharacterErrors = self.characterErrorsUCS2
-
-        # List of where new lines occur
-        self.newLines = [0]
-
-        self.charEncoding = (lookupEncoding("utf-8"), "certain")
-        self.dataStream = self.openStream(source)
-
-        self.reset()
-
-    def reset(self):
-        self.chunk = ""
-        self.chunkSize = 0
-        self.chunkOffset = 0
-        self.errors = []
-
-        # number of (complete) lines in previous chunks
-        self.prevNumLines = 0
-        # number of columns in the last line of the previous chunk
-        self.prevNumCols = 0
-
-        # Deal with CR LF and surrogates split over chunk boundaries
-        self._bufferedCharacter = None
-
-    def openStream(self, source):
-        """Produces a file object from source.
-
-        source can be either a file object, local filename or a string.
-
-        """
-        # Already a file object
-        if hasattr(source, 'read'):
-            stream = source
-        else:
-            stream = StringIO(source)
-
-        return stream
-
-    def _position(self, offset):
-        chunk = self.chunk
-        nLines = chunk.count('\n', 0, offset)
-        positionLine = self.prevNumLines + nLines
-        lastLinePos = chunk.rfind('\n', 0, offset)
-        if lastLinePos == -1:
-            positionColumn = self.prevNumCols + offset
-        else:
-            positionColumn = offset - (lastLinePos + 1)
-        return (positionLine, positionColumn)
-
-    def position(self):
-        """Returns (line, col) of the current position in the stream."""
-        line, col = self._position(self.chunkOffset)
-        return (line + 1, col)
-
-    def char(self):
-        """ Read one character from the stream or queue if available. Return
-            EOF when EOF is reached.
-        """
-        # Read a new chunk from the input stream if necessary
-        if self.chunkOffset >= self.chunkSize:
-            if not self.readChunk():
-                return EOF
-
-        chunkOffset = self.chunkOffset
-        char = self.chunk[chunkOffset]
-        self.chunkOffset = chunkOffset + 1
-
-        return char
-
-    def readChunk(self, chunkSize=None):
-        if chunkSize is None:
-            chunkSize = self._defaultChunkSize
-
-        self.prevNumLines, self.prevNumCols = self._position(self.chunkSize)
-
-        self.chunk = ""
-        self.chunkSize = 0
-        self.chunkOffset = 0
-
-        data = self.dataStream.read(chunkSize)
-
-        # Deal with CR LF and surrogates broken across chunks
-        if self._bufferedCharacter:
-            data = self._bufferedCharacter + data
-            self._bufferedCharacter = None
-        elif not data:
-            # We have no more data, bye-bye stream
-            return False
-
-        if len(data) > 1:
-            lastv = ord(data[-1])
-            if lastv == 0x0D or 0xD800 <= lastv <= 0xDBFF:
-                self._bufferedCharacter = data[-1]
-                data = data[:-1]
-
-        if self.reportCharacterErrors:
-            self.reportCharacterErrors(data)
-
-        # Replace invalid characters
-        data = data.replace("\r\n", "\n")
-        data = data.replace("\r", "\n")
-
-        self.chunk = data
-        self.chunkSize = len(data)
-
-        return True
-
-    def characterErrorsUCS4(self, data):
-        for _ in range(len(invalid_unicode_re.findall(data))):
-            self.errors.append("invalid-codepoint")
-
-    def characterErrorsUCS2(self, data):
-        # Someone picked the wrong compile option
-        # You lose
-        skip = False
-        for match in invalid_unicode_re.finditer(data):
-            if skip:
-                continue
-            codepoint = ord(match.group())
-            pos = match.start()
-            # Pretty sure there should be endianness issues here
-            if _utils.isSurrogatePair(data[pos:pos + 2]):
-                # We have a surrogate pair!
-                char_val = _utils.surrogatePairToCodepoint(data[pos:pos + 2])
-                if char_val in non_bmp_invalid_codepoints:
-                    self.errors.append("invalid-codepoint")
-                skip = True
-            elif (codepoint >= 0xD800 and codepoint <= 0xDFFF and
-                  pos == len(data) - 1):
-                self.errors.append("invalid-codepoint")
-            else:
-                skip = False
-                self.errors.append("invalid-codepoint")
-
-    def charsUntil(self, characters, opposite=False):
-        """ Returns a string of characters from the stream up to but not
-        including any character in 'characters' or EOF. 'characters' must be
-        a container that supports the 'in' method and iteration over its
-        characters.
-        """
-
-        # Use a cache of regexps to find the required characters
-        try:
-            chars = charsUntilRegEx[(characters, opposite)]
-        except KeyError:
-            if __debug__:
-                for c in characters:
-                    assert(ord(c) < 128)
-            regex = "".join(["\\x%02x" % ord(c) for c in characters])
-            if not opposite:
-                regex = "^%s" % regex
-            chars = charsUntilRegEx[(characters, opposite)] = re.compile("[%s]+" % regex)
-
-        rv = []
-
-        while True:
-            # Find the longest matching prefix
-            m = chars.match(self.chunk, self.chunkOffset)
-            if m is None:
-                # If nothing matched, and it wasn't because we ran out of chunk,
-                # then stop
-                if self.chunkOffset != self.chunkSize:
-                    break
-            else:
-                end = m.end()
-                # If not the whole chunk matched, return everything
-                # up to the part that didn't match
-                if end != self.chunkSize:
-                    rv.append(self.chunk[self.chunkOffset:end])
-                    self.chunkOffset = end
-                    break
-            # If the whole remainder of the chunk matched,
-            # use it all and read the next chunk
-            rv.append(self.chunk[self.chunkOffset:])
-            if not self.readChunk():
-                # Reached EOF
-                break
-
-        r = "".join(rv)
-        return r
-
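
The cached pattern construction can be sketched on its own: the ASCII stop characters become a compiled character class, negated unless opposite=True, and the compiled regexps are memoised per (characters, opposite) key::

    import re

    _chars_until_cache = {}

    def chars_until_regex(characters, opposite=False):
        key = (characters, opposite)
        if key not in _chars_until_cache:
            cls = "".join("\\x%02x" % ord(c) for c in characters)
            if not opposite:
                cls = "^" + cls
            _chars_until_cache[key] = re.compile("[%s]+" % cls)
        return _chars_until_cache[key]

    m = chars_until_regex(frozenset('<&')).match('plain text <tag>')
    print(m.group(0))   # 'plain text ' -- stops at the first stop character
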
-    def unget(self, char):
-        # Only one character is allowed to be ungotten at once - it must
-        # be consumed again before any further call to unget
-        if char is not None:
-            if self.chunkOffset == 0:
-                # unget is called quite rarely, so it's a good idea to do
-                # more work here if it saves a bit of work in the frequently
-                # called char and charsUntil.
-                # So, just prepend the ungotten character onto the current
-                # chunk:
-                self.chunk = char + self.chunk
-                self.chunkSize += 1
-            else:
-                self.chunkOffset -= 1
-                assert self.chunk[self.chunkOffset] == char
-
-
-class HTMLBinaryInputStream(HTMLUnicodeInputStream):
-    """Provides a unicode stream of characters to the HTMLTokenizer.
-
-    This class takes care of character encoding and removing or replacing
-    incorrect byte-sequences and also provides column and line tracking.
-
-    """
-
-    def __init__(self, source, override_encoding=None, transport_encoding=None,
-                 same_origin_parent_encoding=None, likely_encoding=None,
-                 default_encoding="windows-1252", useChardet=True):
-        """Initialises the HTMLInputStream.
-
-        HTMLInputStream(source, [encoding]) -> Normalized stream from source
-        for use by html5lib.
-
-        source can be either a file-object, local filename or a string.
-
-        The optional encoding parameter must be a string that indicates
-        the encoding.  If specified, that encoding will be used,
-        regardless of any BOM or later declaration (such as in a meta
-        element)
-
-        """
-        # Raw Stream - for unicode objects this will encode to utf-8 and set
-        #              self.charEncoding as appropriate
-        self.rawStream = self.openStream(source)
-
-        HTMLUnicodeInputStream.__init__(self, self.rawStream)
-
-        # Encoding Information
-        # Number of bytes to use when looking for a meta element with
-        # encoding information
-        self.numBytesMeta = 1024
-        # Number of bytes to use when detecting the encoding using chardet
-        self.numBytesChardet = 100
-        # Things from args
-        self.override_encoding = override_encoding
-        self.transport_encoding = transport_encoding
-        self.same_origin_parent_encoding = same_origin_parent_encoding
-        self.likely_encoding = likely_encoding
-        self.default_encoding = default_encoding
-
-        # Determine encoding
-        self.charEncoding = self.determineEncoding(useChardet)
-        assert self.charEncoding[0] is not None
-
-        # Call superclass
-        self.reset()
-
-    def reset(self):
-        self.dataStream = self.charEncoding[0].codec_info.streamreader(self.rawStream, 'replace')
-        HTMLUnicodeInputStream.reset(self)
-
-    def openStream(self, source):
-        """Produces a file object from source.
-
-        source can be either a file object, local filename or a string.
-
-        """
-        # Already a file object
-        if hasattr(source, 'read'):
-            stream = source
-        else:
-            stream = BytesIO(source)
-
-        try:
-            stream.seek(stream.tell())
-        except:  # pylint:disable=bare-except
-            stream = BufferedStream(stream)
-
-        return stream
-
-    def determineEncoding(self, chardet=True):
-        # BOMs take precedence over everything
-        # This will also read past the BOM if present
-        charEncoding = self.detectBOM(), "certain"
-        if charEncoding[0] is not None:
-            return charEncoding
-
-        # If we've been overridden, we've been overridden
-        charEncoding = lookupEncoding(self.override_encoding), "certain"
-        if charEncoding[0] is not None:
-            return charEncoding
-
-        # Now check the transport layer
-        charEncoding = lookupEncoding(self.transport_encoding), "certain"
-        if charEncoding[0] is not None:
-            return charEncoding
-
-        # Look for meta elements with encoding information
-        charEncoding = self.detectEncodingMeta(), "tentative"
-        if charEncoding[0] is not None:
-            return charEncoding
-
-        # Parent document encoding
-        charEncoding = lookupEncoding(self.same_origin_parent_encoding), "tentative"
-        if charEncoding[0] is not None and not charEncoding[0].name.startswith("utf-16"):
-            return charEncoding
-
-        # "likely" encoding
-        charEncoding = lookupEncoding(self.likely_encoding), "tentative"
-        if charEncoding[0] is not None:
-            return charEncoding
-
-        # Guess with chardet, if available
-        if chardet:
-            try:
-                from pip._vendor.chardet.universaldetector import UniversalDetector
-            except ImportError:
-                pass
-            else:
-                buffers = []
-                detector = UniversalDetector()
-                while not detector.done:
-                    buffer = self.rawStream.read(self.numBytesChardet)
-                    assert isinstance(buffer, bytes)
-                    if not buffer:
-                        break
-                    buffers.append(buffer)
-                    detector.feed(buffer)
-                detector.close()
-                encoding = lookupEncoding(detector.result['encoding'])
-                self.rawStream.seek(0)
-                if encoding is not None:
-                    return encoding, "tentative"
-
-        # Try the default encoding
-        charEncoding = lookupEncoding(self.default_encoding), "tentative"
-        if charEncoding[0] is not None:
-            return charEncoding
-
-        # Fallback to html5lib's default if even that hasn't worked
-        return lookupEncoding("windows-1252"), "tentative"
-
-    def changeEncoding(self, newEncoding):
-        assert self.charEncoding[1] != "certain"
-        newEncoding = lookupEncoding(newEncoding)
-        if newEncoding is None:
-            return
-        if newEncoding.name in ("utf-16be", "utf-16le"):
-            newEncoding = lookupEncoding("utf-8")
-            assert newEncoding is not None
-        elif newEncoding == self.charEncoding[0]:
-            self.charEncoding = (self.charEncoding[0], "certain")
-        else:
-            self.rawStream.seek(0)
-            self.charEncoding = (newEncoding, "certain")
-            self.reset()
-            raise _ReparseException("Encoding changed from %s to %s" % (self.charEncoding[0], newEncoding))
-
-    def detectBOM(self):
-        """Attempts to detect at BOM at the start of the stream. If
-        an encoding can be determined from the BOM return the name of the
-        encoding otherwise return None"""
-        bomDict = {
-            codecs.BOM_UTF8: 'utf-8',
-            codecs.BOM_UTF16_LE: 'utf-16le', codecs.BOM_UTF16_BE: 'utf-16be',
-            codecs.BOM_UTF32_LE: 'utf-32le', codecs.BOM_UTF32_BE: 'utf-32be'
-        }
-
-        # Go to beginning of file and read in 4 bytes
-        string = self.rawStream.read(4)
-        assert isinstance(string, bytes)
-
-        # Try detecting the BOM using bytes from the string
-        encoding = bomDict.get(string[:3])         # UTF-8
-        seek = 3
-        if not encoding:
-            # Need to detect UTF-32 before UTF-16
-            encoding = bomDict.get(string)         # UTF-32
-            seek = 4
-            if not encoding:
-                encoding = bomDict.get(string[:2])  # UTF-16
-                seek = 2
-
-        # Set the read position past the BOM if one was found, otherwise
-        # set it to the start of the stream
-        if encoding:
-            self.rawStream.seek(seek)
-            return lookupEncoding(encoding)
-        else:
-            self.rawStream.seek(0)
-            return None
-
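
A self-contained sketch of the same sniffing order (UTF-8 first, then UTF-32 before UTF-16, since the UTF-16LE BOM is a prefix of the UTF-32LE one)::

    import codecs

    def sniff_bom(prefix):
        """Return (encoding_name, bom_length) for the first bytes, or (None, 0)."""
        if prefix.startswith(codecs.BOM_UTF8):
            return 'utf-8', len(codecs.BOM_UTF8)
        for bom, name in ((codecs.BOM_UTF32_LE, 'utf-32-le'),
                          (codecs.BOM_UTF32_BE, 'utf-32-be'),
                          (codecs.BOM_UTF16_LE, 'utf-16-le'),
                          (codecs.BOM_UTF16_BE, 'utf-16-be')):
            if prefix.startswith(bom):
                return name, len(bom)
        return None, 0

    print(sniff_bom(b'\xff\xfe\x00\x00rest'))  # ('utf-32-le', 4), not utf-16-le
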
-    def detectEncodingMeta(self):
-        """Report the encoding declared by the meta element
-        """
-        buffer = self.rawStream.read(self.numBytesMeta)
-        assert isinstance(buffer, bytes)
-        parser = EncodingParser(buffer)
-        self.rawStream.seek(0)
-        encoding = parser.getEncoding()
-
-        if encoding is not None and encoding.name in ("utf-16be", "utf-16le"):
-            encoding = lookupEncoding("utf-8")
-
-        return encoding
-
-
-class EncodingBytes(bytes):
-    """String-like object with an associated position and various extra methods
-    If the position is ever greater than the string length then an exception is
-    raised"""
-    def __new__(self, value):
-        assert isinstance(value, bytes)
-        return bytes.__new__(self, value.lower())
-
-    def __init__(self, value):
-        # pylint:disable=unused-argument
-        self._position = -1
-
-    def __iter__(self):
-        return self
-
-    def __next__(self):
-        p = self._position = self._position + 1
-        if p >= len(self):
-            raise StopIteration
-        elif p < 0:
-            raise TypeError
-        return self[p:p + 1]
-
-    def next(self):
-        # Py2 compat
-        return self.__next__()
-
-    def previous(self):
-        p = self._position
-        if p >= len(self):
-            raise StopIteration
-        elif p < 0:
-            raise TypeError
-        self._position = p = p - 1
-        return self[p:p + 1]
-
-    def setPosition(self, position):
-        if self._position >= len(self):
-            raise StopIteration
-        self._position = position
-
-    def getPosition(self):
-        if self._position >= len(self):
-            raise StopIteration
-        if self._position >= 0:
-            return self._position
-        else:
-            return None
-
-    position = property(getPosition, setPosition)
-
-    def getCurrentByte(self):
-        return self[self.position:self.position + 1]
-
-    currentByte = property(getCurrentByte)
-
-    def skip(self, chars=spaceCharactersBytes):
-        """Skip past a list of characters"""
-        p = self.position               # use property for the error-checking
-        while p < len(self):
-            c = self[p:p + 1]
-            if c not in chars:
-                self._position = p
-                return c
-            p += 1
-        self._position = p
-        return None
-
-    def skipUntil(self, chars):
-        p = self.position
-        while p < len(self):
-            c = self[p:p + 1]
-            if c in chars:
-                self._position = p
-                return c
-            p += 1
-        self._position = p
-        return None
-
-    def matchBytes(self, bytes):
-        """Look for a sequence of bytes at the start of a string. If the bytes
-        are found return True and advance the position to the byte after the
-        match. Otherwise return False and leave the position alone"""
-        p = self.position
-        data = self[p:p + len(bytes)]
-        rv = data.startswith(bytes)
-        if rv:
-            self.position += len(bytes)
-        return rv
-
-    def jumpTo(self, bytes):
-        """Look for the next sequence of bytes matching a given sequence. If
-        a match is found advance the position to the last byte of the match"""
-        newPosition = self[self.position:].find(bytes)
-        if newPosition > -1:
-            # XXX: This is ugly, but I can't see a nicer way to fix this.
-            if self._position == -1:
-                self._position = 0
-            self._position += (newPosition + len(bytes) - 1)
-            return True
-        else:
-            raise StopIteration
-
-
-class EncodingParser(object):
-    """Mini parser for detecting character encoding from meta elements"""
-
-    def __init__(self, data):
-        """string - the data to work on for encoding detection"""
-        self.data = EncodingBytes(data)
-        self.encoding = None
-
-    def getEncoding(self):
-        methodDispatch = (
-            (b"<!--", self.handleComment),
-            (b"<meta", self.handleMeta),
-            (b"</", self.handlePossibleEndTag),
-            (b"<!", self.handleOther),
-            (b"<?", self.handleOther),
-            (b"<", self.handlePossibleStartTag))
-        for _ in self.data:
-            keepParsing = True
-            for key, method in methodDispatch:
-                if self.data.matchBytes(key):
-                    try:
-                        keepParsing = method()
-                        break
-                    except StopIteration:
-                        keepParsing = False
-                        break
-            if not keepParsing:
-                break
-
-        return self.encoding
-
-    def handleComment(self):
-        """Skip over comments"""
-        return self.data.jumpTo(b"-->")
-
-    def handleMeta(self):
-        if self.data.currentByte not in spaceCharactersBytes:
-            # if we have <meta not followed by a space so just keep going
-            return True
-        # We have a valid meta element we want to search for attributes
-        hasPragma = False
-        pendingEncoding = None
-        while True:
-            # Try to find the next attribute after the current position
-            attr = self.getAttribute()
-            if attr is None:
-                return True
-            else:
-                if attr[0] == b"http-equiv":
-                    hasPragma = attr[1] == b"content-type"
-                    if hasPragma and pendingEncoding is not None:
-                        self.encoding = pendingEncoding
-                        return False
-                elif attr[0] == b"charset":
-                    tentativeEncoding = attr[1]
-                    codec = lookupEncoding(tentativeEncoding)
-                    if codec is not None:
-                        self.encoding = codec
-                        return False
-                elif attr[0] == b"content":
-                    contentParser = ContentAttrParser(EncodingBytes(attr[1]))
-                    tentativeEncoding = contentParser.parse()
-                    if tentativeEncoding is not None:
-                        codec = lookupEncoding(tentativeEncoding)
-                        if codec is not None:
-                            if hasPragma:
-                                self.encoding = codec
-                                return False
-                            else:
-                                pendingEncoding = codec
-
-    def handlePossibleStartTag(self):
-        return self.handlePossibleTag(False)
-
-    def handlePossibleEndTag(self):
-        next(self.data)
-        return self.handlePossibleTag(True)
-
-    def handlePossibleTag(self, endTag):
-        data = self.data
-        if data.currentByte not in asciiLettersBytes:
-            # If the next byte is not an ascii letter either ignore this
-            # fragment (possible start tag case) or treat it according to
-            # handleOther
-            if endTag:
-                data.previous()
-                self.handleOther()
-            return True
-
-        c = data.skipUntil(spacesAngleBrackets)
-        if c == b"<":
-            # return to the first step in the overall "two step" algorithm
-            # reprocessing the < byte
-            data.previous()
-        else:
-            # Move to the first attribute
-            self.getAttribute()
-        return True
-
-    def handleOther(self):
-        return self.data.jumpTo(b">")
-
-    def getAttribute(self):
-        """Return a name,value pair for the next attribute in the stream,
-        if one is found, or None"""
-        data = self.data
-        # Step 1 (skip chars)
-        c = data.skip(spaceCharactersBytes | frozenset([b"/"]))
-        assert c is None or len(c) == 1
-        # Step 2
-        if c in (b">", None):
-            return None
-        # Step 3
-        attrName = []
-        attrValue = []
-        # Step 4 attribute name
-        while True:
-            if c == b"=" and attrName:
-                break
-            elif c in spaceCharactersBytes:
-                # Step 6!
-                c = data.skip()
-                break
-            elif c in (b"/", b">"):
-                return b"".join(attrName), b""
-            elif c in asciiUppercaseBytes:
-                attrName.append(c.lower())
-            elif c is None:
-                return None
-            else:
-                attrName.append(c)
-            # Step 5
-            c = next(data)
-        # Step 7
-        if c != b"=":
-            data.previous()
-            return b"".join(attrName), b""
-        # Step 8
-        next(data)
-        # Step 9
-        c = data.skip()
-        # Step 10
-        if c in (b"'", b'"'):
-            # 10.1
-            quoteChar = c
-            while True:
-                # 10.2
-                c = next(data)
-                # 10.3
-                if c == quoteChar:
-                    next(data)
-                    return b"".join(attrName), b"".join(attrValue)
-                # 10.4
-                elif c in asciiUppercaseBytes:
-                    attrValue.append(c.lower())
-                # 10.5
-                else:
-                    attrValue.append(c)
-        elif c == b">":
-            return b"".join(attrName), b""
-        elif c in asciiUppercaseBytes:
-            attrValue.append(c.lower())
-        elif c is None:
-            return None
-        else:
-            attrValue.append(c)
-        # Step 11
-        while True:
-            c = next(data)
-            if c in spacesAngleBrackets:
-                return b"".join(attrName), b"".join(attrValue)
-            elif c in asciiUppercaseBytes:
-                attrValue.append(c.lower())
-            elif c is None:
-                return None
-            else:
-                attrValue.append(c)
-
-
-class ContentAttrParser(object):
-    def __init__(self, data):
-        assert isinstance(data, bytes)
-        self.data = data
-
-    def parse(self):
-        try:
-            # Check if the attr name is charset
-            # otherwise return
-            self.data.jumpTo(b"charset")
-            self.data.position += 1
-            self.data.skip()
-            if not self.data.currentByte == b"=":
-                # If there is no = sign keep looking for attrs
-                return None
-            self.data.position += 1
-            self.data.skip()
-            # Look for an encoding between matching quote marks
-            if self.data.currentByte in (b'"', b"'"):
-                quoteMark = self.data.currentByte
-                self.data.position += 1
-                oldPosition = self.data.position
-                if self.data.jumpTo(quoteMark):
-                    return self.data[oldPosition:self.data.position]
-                else:
-                    return None
-            else:
-                # Unquoted value
-                oldPosition = self.data.position
-                try:
-                    self.data.skipUntil(spaceCharactersBytes)
-                    return self.data[oldPosition:self.data.position]
-                except StopIteration:
-                    # Return the whole remaining value
-                    return self.data[oldPosition:]
-        except StopIteration:
-            return None
-
-
-def lookupEncoding(encoding):
-    """Return the python codec name corresponding to an encoding or None if the
-    string doesn't correspond to a valid encoding."""
-    if isinstance(encoding, binary_type):
-        try:
-            encoding = encoding.decode("ascii")
-        except UnicodeDecodeError:
-            return None
-
-    if encoding is not None:
-        try:
-            return webencodings.lookup(encoding)
-        except AttributeError:
-            return None
-    else:
-        return None
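Note on the encoding-detection module deleted above: the precedence it implements is a byte-order mark ("certain"), then a prescan of the first bytes for a meta-declared charset ("tentative"), then an optional chardet guess, then the caller's default encoding, and finally windows-1252. The sketch below is only an illustrative, standard-library approximation of that precedence, not html5lib's API: sniff_encoding, _BOMS, and the regex-based meta scan are hypothetical names and simplifications of the byte-level EncodingParser shown above.

    import codecs
    import re

    # BOM signatures; UTF-32 is listed before UTF-16 because BOM_UTF16_LE
    # is a prefix of BOM_UTF32_LE.
    _BOMS = [
        (codecs.BOM_UTF8, "utf-8"),
        (codecs.BOM_UTF32_LE, "utf-32-le"),
        (codecs.BOM_UTF32_BE, "utf-32-be"),
        (codecs.BOM_UTF16_LE, "utf-16-le"),
        (codecs.BOM_UTF16_BE, "utf-16-be"),
    ]

    # Hypothetical regex stand-in for EncodingParser's byte-level <meta> prescan.
    _META_CHARSET = re.compile(
        rb"""<meta[^>]+charset\s*=\s*["']?\s*([a-zA-Z0-9_.:\-]+)""", re.IGNORECASE)

    def sniff_encoding(head, default="windows-1252"):
        """Return (encoding, certainty) for the first bytes of an HTML document."""
        # 1. A byte-order mark wins outright.
        for bom, name in _BOMS:
            if head.startswith(bom):
                return name, "certain"
        # 2. Otherwise look for a <meta charset=...> declaration.
        match = _META_CHARSET.search(head)
        if match:
            candidate = match.group(1).decode("ascii", "replace")
            try:
                codecs.lookup(candidate)       # only accept names Python knows
                return candidate, "tentative"
            except LookupError:
                pass
        # 3. Fall back to the default, mirroring the windows-1252 fallback above.
        return default, "tentative"

    if __name__ == "__main__":
        print(sniff_encoding(b'<!doctype html><meta charset="utf-8"><title>x</title>'))
        print(sniff_encoding(codecs.BOM_UTF8 + b'<p>hello</p>'))
        print(sniff_encoding(b'<html><body>no declaration</body></html>'))
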
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_tokenizer.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_tokenizer.py
deleted file mode 100644
index 178f6e7..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_tokenizer.py
+++ /dev/null
@@ -1,1721 +0,0 @@
-from __future__ import absolute_import, division, unicode_literals
-
-from pip._vendor.six import unichr as chr
-
-from collections import deque
-
-from .constants import spaceCharacters
-from .constants import entities
-from .constants import asciiLetters, asciiUpper2Lower
-from .constants import digits, hexDigits, EOF
-from .constants import tokenTypes, tagTokenTypes
-from .constants import replacementCharacters
-
-from ._inputstream import HTMLInputStream
-
-from ._trie import Trie
-
-entitiesTrie = Trie(entities)
-
-
-class HTMLTokenizer(object):
-    """ This class takes care of tokenizing HTML.
-
-    * self.currentToken
-      Holds the token that is currently being processed.
-
-    * self.state
-      Holds a reference to the method to be invoked... XXX
-
-    * self.stream
-      Points to HTMLInputStream object.
-    """
-
-    def __init__(self, stream, parser=None, **kwargs):
-
-        self.stream = HTMLInputStream(stream, **kwargs)
-        self.parser = parser
-
-        # Setup the initial tokenizer state
-        self.escapeFlag = False
-        self.lastFourChars = []
-        self.state = self.dataState
-        self.escape = False
-
-        # The current token being created
-        self.currentToken = None
-        super(HTMLTokenizer, self).__init__()
-
-    def __iter__(self):
-        """ This is where the magic happens.
-
-        We do our usually processing through the states and when we have a token
-        to return we yield the token which pauses processing until the next token
-        is requested.
-        """
-        self.tokenQueue = deque([])
-        # Start processing. When EOF is reached self.state will return False
-        # instead of True and the loop will terminate.
-        while self.state():
-            while self.stream.errors:
-                yield {"type": tokenTypes["ParseError"], "data": self.stream.errors.pop(0)}
-            while self.tokenQueue:
-                yield self.tokenQueue.popleft()
-
-    def consumeNumberEntity(self, isHex):
-        """This function returns either U+FFFD or the character based on the
-        decimal or hexadecimal representation. It also discards ";" if present.
-        If not present self.tokenQueue.append({"type": tokenTypes["ParseError"]}) is invoked.
-        """
-
-        allowed = digits
-        radix = 10
-        if isHex:
-            allowed = hexDigits
-            radix = 16
-
-        charStack = []
-
-        # Consume all the characters that are in range while making sure we
-        # don't hit an EOF.
-        c = self.stream.char()
-        while c in allowed and c is not EOF:
-            charStack.append(c)
-            c = self.stream.char()
-
-        # Convert the set of characters consumed to an int.
-        charAsInt = int("".join(charStack), radix)
-
-        # Certain characters get replaced with others
-        if charAsInt in replacementCharacters:
-            char = replacementCharacters[charAsInt]
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "illegal-codepoint-for-numeric-entity",
-                                    "datavars": {"charAsInt": charAsInt}})
-        elif ((0xD800 <= charAsInt <= 0xDFFF) or
-              (charAsInt > 0x10FFFF)):
-            char = "\uFFFD"
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "illegal-codepoint-for-numeric-entity",
-                                    "datavars": {"charAsInt": charAsInt}})
-        else:
-            # Should speed up this check somehow (e.g. move the set to a constant)
-            if ((0x0001 <= charAsInt <= 0x0008) or
-                (0x000E <= charAsInt <= 0x001F) or
-                (0x007F <= charAsInt <= 0x009F) or
-                (0xFDD0 <= charAsInt <= 0xFDEF) or
-                charAsInt in frozenset([0x000B, 0xFFFE, 0xFFFF, 0x1FFFE,
-                                        0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE,
-                                        0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE,
-                                        0x5FFFF, 0x6FFFE, 0x6FFFF, 0x7FFFE,
-                                        0x7FFFF, 0x8FFFE, 0x8FFFF, 0x9FFFE,
-                                        0x9FFFF, 0xAFFFE, 0xAFFFF, 0xBFFFE,
-                                        0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE,
-                                        0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE,
-                                        0xFFFFF, 0x10FFFE, 0x10FFFF])):
-                self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                        "data":
-                                        "illegal-codepoint-for-numeric-entity",
-                                        "datavars": {"charAsInt": charAsInt}})
-            try:
-                # Try/except needed as UCS-2 Python builds' unichar only works
-                # within the BMP.
-                char = chr(charAsInt)
-            except ValueError:
-                v = charAsInt - 0x10000
-                char = chr(0xD800 | (v >> 10)) + chr(0xDC00 | (v & 0x3FF))
-
-        # Discard the ; if present. Otherwise, put it back on the queue and
-        # invoke parseError on parser.
-        if c != ";":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "numeric-entity-without-semicolon"})
-            self.stream.unget(c)
-
-        return char
-
-    def consumeEntity(self, allowedChar=None, fromAttribute=False):
-        # Initialise to the default output for when no entity is matched
-        output = "&"
-
-        charStack = [self.stream.char()]
-        if (charStack[0] in spaceCharacters or charStack[0] in (EOF, "<", "&") or
-                (allowedChar is not None and allowedChar == charStack[0])):
-            self.stream.unget(charStack[0])
-
-        elif charStack[0] == "#":
-            # Read the next character to see if it's hex or decimal
-            hex = False
-            charStack.append(self.stream.char())
-            if charStack[-1] in ("x", "X"):
-                hex = True
-                charStack.append(self.stream.char())
-
-            # charStack[-1] should be the first digit
-            if (hex and charStack[-1] in hexDigits) \
-                    or (not hex and charStack[-1] in digits):
-                # At least one digit found, so consume the whole number
-                self.stream.unget(charStack[-1])
-                output = self.consumeNumberEntity(hex)
-            else:
-                # No digits found
-                self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                        "data": "expected-numeric-entity"})
-                self.stream.unget(charStack.pop())
-                output = "&" + "".join(charStack)
-
-        else:
-            # At this point in the process might have named entity. Entities
-            # are stored in the global variable "entities".
-            #
-            # Consume characters and compare to these to a substring of the
-            # entity names in the list until the substring no longer matches.
-            while (charStack[-1] is not EOF):
-                if not entitiesTrie.has_keys_with_prefix("".join(charStack)):
-                    break
-                charStack.append(self.stream.char())
-
-            # At this point we have a string that starts with some characters
-            # that may match an entity
-            # Try to find the longest entity the string will match to take care
-            # of ¬i for instance.
-            try:
-                entityName = entitiesTrie.longest_prefix("".join(charStack[:-1]))
-                entityLength = len(entityName)
-            except KeyError:
-                entityName = None
-
-            if entityName is not None:
-                if entityName[-1] != ";":
-                    self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                            "named-entity-without-semicolon"})
-                if (entityName[-1] != ";" and fromAttribute and
-                    (charStack[entityLength] in asciiLetters or
-                     charStack[entityLength] in digits or
-                     charStack[entityLength] == "=")):
-                    self.stream.unget(charStack.pop())
-                    output = "&" + "".join(charStack)
-                else:
-                    output = entities[entityName]
-                    self.stream.unget(charStack.pop())
-                    output += "".join(charStack[entityLength:])
-            else:
-                self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                        "expected-named-entity"})
-                self.stream.unget(charStack.pop())
-                output = "&" + "".join(charStack)
-
-        if fromAttribute:
-            self.currentToken["data"][-1][1] += output
-        else:
-            if output in spaceCharacters:
-                tokenType = "SpaceCharacters"
-            else:
-                tokenType = "Characters"
-            self.tokenQueue.append({"type": tokenTypes[tokenType], "data": output})
-
-    def processEntityInAttribute(self, allowedChar):
-        """This method replaces the need for "entityInAttributeValueState".
-        """
-        self.consumeEntity(allowedChar=allowedChar, fromAttribute=True)
-
-    def emitCurrentToken(self):
-        """This method is a generic handler for emitting the tags. It also sets
-        the state to "data" because that's what's needed after a token has been
-        emitted.
-        """
-        token = self.currentToken
-        # Add token to the queue to be yielded
-        if (token["type"] in tagTokenTypes):
-            token["name"] = token["name"].translate(asciiUpper2Lower)
-            if token["type"] == tokenTypes["EndTag"]:
-                if token["data"]:
-                    self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                            "data": "attributes-in-end-tag"})
-                if token["selfClosing"]:
-                    self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                            "data": "self-closing-flag-on-end-tag"})
-        self.tokenQueue.append(token)
-        self.state = self.dataState
-
-    # Below are the various tokenizer states worked out.
-    def dataState(self):
-        data = self.stream.char()
-        if data == "&":
-            self.state = self.entityDataState
-        elif data == "<":
-            self.state = self.tagOpenState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.tokenQueue.append({"type": tokenTypes["Characters"],
-                                    "data": "\u0000"})
-        elif data is EOF:
-            # Tokenization ends.
-            return False
-        elif data in spaceCharacters:
-            # Directly after emitting a token you switch back to the "data
-            # state". At that point spaceCharacters are important so they are
-            # emitted separately.
-            self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data":
-                                    data + self.stream.charsUntil(spaceCharacters, True)})
-            # No need to update lastFourChars here, since the first space will
-            # have already been appended to lastFourChars and will have broken
-            # any  sequences
-        else:
-            chars = self.stream.charsUntil(("&", "<", "\u0000"))
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
-                                    data + chars})
-        return True
-
-    def entityDataState(self):
-        self.consumeEntity()
-        self.state = self.dataState
-        return True
-
-    def rcdataState(self):
-        data = self.stream.char()
-        if data == "&":
-            self.state = self.characterReferenceInRcdata
-        elif data == "<":
-            self.state = self.rcdataLessThanSignState
-        elif data == EOF:
-            # Tokenization ends.
-            return False
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.tokenQueue.append({"type": tokenTypes["Characters"],
-                                    "data": "\uFFFD"})
-        elif data in spaceCharacters:
-            # Directly after emitting a token you switch back to the "data
-            # state". At that point spaceCharacters are important so they are
-            # emitted separately.
-            self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data":
-                                    data + self.stream.charsUntil(spaceCharacters, True)})
-            # No need to update lastFourChars here, since the first space will
-            # have already been appended to lastFourChars and will have broken
-            # any  sequences
-        else:
-            chars = self.stream.charsUntil(("&", "<", "\u0000"))
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
-                                    data + chars})
-        return True
-
-    def characterReferenceInRcdata(self):
-        self.consumeEntity()
-        self.state = self.rcdataState
-        return True
-
-    def rawtextState(self):
-        data = self.stream.char()
-        if data == "<":
-            self.state = self.rawtextLessThanSignState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.tokenQueue.append({"type": tokenTypes["Characters"],
-                                    "data": "\uFFFD"})
-        elif data == EOF:
-            # Tokenization ends.
-            return False
-        else:
-            chars = self.stream.charsUntil(("<", "\u0000"))
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
-                                    data + chars})
-        return True
-
-    def scriptDataState(self):
-        data = self.stream.char()
-        if data == "<":
-            self.state = self.scriptDataLessThanSignState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.tokenQueue.append({"type": tokenTypes["Characters"],
-                                    "data": "\uFFFD"})
-        elif data == EOF:
-            # Tokenization ends.
-            return False
-        else:
-            chars = self.stream.charsUntil(("<", "\u0000"))
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
-                                    data + chars})
-        return True
-
-    def plaintextState(self):
-        data = self.stream.char()
-        if data == EOF:
-            # Tokenization ends.
-            return False
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.tokenQueue.append({"type": tokenTypes["Characters"],
-                                    "data": "\uFFFD"})
-        else:
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
-                                    data + self.stream.charsUntil("\u0000")})
-        return True
-
-    def tagOpenState(self):
-        data = self.stream.char()
-        if data == "!":
-            self.state = self.markupDeclarationOpenState
-        elif data == "/":
-            self.state = self.closeTagOpenState
-        elif data in asciiLetters:
-            self.currentToken = {"type": tokenTypes["StartTag"],
-                                 "name": data, "data": [],
-                                 "selfClosing": False,
-                                 "selfClosingAcknowledged": False}
-            self.state = self.tagNameState
-        elif data == ">":
-            # XXX In theory it could be something besides a tag name. But
-            # do we really care?
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "expected-tag-name-but-got-right-bracket"})
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<>"})
-            self.state = self.dataState
-        elif data == "?":
-            # XXX In theory it could be something besides a tag name. But
-            # do we really care?
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "expected-tag-name-but-got-question-mark"})
-            self.stream.unget(data)
-            self.state = self.bogusCommentState
-        else:
-            # XXX
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "expected-tag-name"})
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
-            self.stream.unget(data)
-            self.state = self.dataState
-        return True
-
-    def closeTagOpenState(self):
-        data = self.stream.char()
-        if data in asciiLetters:
-            self.currentToken = {"type": tokenTypes["EndTag"], "name": data,
-                                 "data": [], "selfClosing": False}
-            self.state = self.tagNameState
-        elif data == ">":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "expected-closing-tag-but-got-right-bracket"})
-            self.state = self.dataState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "expected-closing-tag-but-got-eof"})
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
-            self.state = self.dataState
-        else:
-            # XXX data can be _'_...
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "expected-closing-tag-but-got-char",
-                                    "datavars": {"data": data}})
-            self.stream.unget(data)
-            self.state = self.bogusCommentState
-        return True
-
-    def tagNameState(self):
-        data = self.stream.char()
-        if data in spaceCharacters:
-            self.state = self.beforeAttributeNameState
-        elif data == ">":
-            self.emitCurrentToken()
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-tag-name"})
-            self.state = self.dataState
-        elif data == "/":
-            self.state = self.selfClosingStartTagState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["name"] += "\uFFFD"
-        else:
-            self.currentToken["name"] += data
-            # (Don't use charsUntil here, because tag names are
-            # very short and it's faster to not do anything fancy)
-        return True
-
-    def rcdataLessThanSignState(self):
-        data = self.stream.char()
-        if data == "/":
-            self.temporaryBuffer = ""
-            self.state = self.rcdataEndTagOpenState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
-            self.stream.unget(data)
-            self.state = self.rcdataState
-        return True
-
-    def rcdataEndTagOpenState(self):
-        data = self.stream.char()
-        if data in asciiLetters:
-            self.temporaryBuffer += data
-            self.state = self.rcdataEndTagNameState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
-            self.stream.unget(data)
-            self.state = self.rcdataState
-        return True
-
-    def rcdataEndTagNameState(self):
-        appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
-        data = self.stream.char()
-        if data in spaceCharacters and appropriate:
-            self.currentToken = {"type": tokenTypes["EndTag"],
-                                 "name": self.temporaryBuffer,
-                                 "data": [], "selfClosing": False}
-            self.state = self.beforeAttributeNameState
-        elif data == "/" and appropriate:
-            self.currentToken = {"type": tokenTypes["EndTag"],
-                                 "name": self.temporaryBuffer,
-                                 "data": [], "selfClosing": False}
-            self.state = self.selfClosingStartTagState
-        elif data == ">" and appropriate:
-            self.currentToken = {"type": tokenTypes["EndTag"],
-                                 "name": self.temporaryBuffer,
-                                 "data": [], "selfClosing": False}
-            self.emitCurrentToken()
-            self.state = self.dataState
-        elif data in asciiLetters:
-            self.temporaryBuffer += data
-        else:
-            self.tokenQueue.append({"type": tokenTypes["Characters"],
-                                    "data": "</" + self.temporaryBuffer})
-            self.stream.unget(data)
-            self.state = self.rcdataState
-        return True
-
-    def rawtextLessThanSignState(self):
-        data = self.stream.char()
-        if data == "/":
-            self.temporaryBuffer = ""
-            self.state = self.rawtextEndTagOpenState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
-            self.stream.unget(data)
-            self.state = self.rawtextState
-        return True
-
-    def rawtextEndTagOpenState(self):
-        data = self.stream.char()
-        if data in asciiLetters:
-            self.temporaryBuffer += data
-            self.state = self.rawtextEndTagNameState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
-            self.stream.unget(data)
-            self.state = self.rawtextState
-        return True
-
-    def rawtextEndTagNameState(self):
-        appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
-        data = self.stream.char()
-        if data in spaceCharacters and appropriate:
-            self.currentToken = {"type": tokenTypes["EndTag"],
-                                 "name": self.temporaryBuffer,
-                                 "data": [], "selfClosing": False}
-            self.state = self.beforeAttributeNameState
-        elif data == "/" and appropriate:
-            self.currentToken = {"type": tokenTypes["EndTag"],
-                                 "name": self.temporaryBuffer,
-                                 "data": [], "selfClosing": False}
-            self.state = self.selfClosingStartTagState
-        elif data == ">" and appropriate:
-            self.currentToken = {"type": tokenTypes["EndTag"],
-                                 "name": self.temporaryBuffer,
-                                 "data": [], "selfClosing": False}
-            self.emitCurrentToken()
-            self.state = self.dataState
-        elif data in asciiLetters:
-            self.temporaryBuffer += data
-        else:
-            self.tokenQueue.append({"type": tokenTypes["Characters"],
-                                    "data": "</" + self.temporaryBuffer})
-            self.stream.unget(data)
-            self.state = self.rawtextState
-        return True
-
-    def scriptDataLessThanSignState(self):
-        data = self.stream.char()
-        if data == "/":
-            self.temporaryBuffer = ""
-            self.state = self.scriptDataEndTagOpenState
-        elif data == "!":
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<!"})
-            self.state = self.scriptDataEscapeStartState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
-            self.stream.unget(data)
-            self.state = self.scriptDataState
-        return True
-
-    def scriptDataEndTagOpenState(self):
-        data = self.stream.char()
-        if data in asciiLetters:
-            self.temporaryBuffer += data
-            self.state = self.scriptDataEndTagNameState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
-            self.stream.unget(data)
-            self.state = self.scriptDataState
-        return True
-
-    def scriptDataEndTagNameState(self):
-        appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
-        data = self.stream.char()
-        if data in spaceCharacters and appropriate:
-            self.currentToken = {"type": tokenTypes["EndTag"],
-                                 "name": self.temporaryBuffer,
-                                 "data": [], "selfClosing": False}
-            self.state = self.beforeAttributeNameState
-        elif data == "/" and appropriate:
-            self.currentToken = {"type": tokenTypes["EndTag"],
-                                 "name": self.temporaryBuffer,
-                                 "data": [], "selfClosing": False}
-            self.state = self.selfClosingStartTagState
-        elif data == ">" and appropriate:
-            self.currentToken = {"type": tokenTypes["EndTag"],
-                                 "name": self.temporaryBuffer,
-                                 "data": [], "selfClosing": False}
-            self.emitCurrentToken()
-            self.state = self.dataState
-        elif data in asciiLetters:
-            self.temporaryBuffer += data
-        else:
-            self.tokenQueue.append({"type": tokenTypes["Characters"],
-                                    "data": "</" + self.temporaryBuffer})
-            self.stream.unget(data)
-            self.state = self.scriptDataState
-        return True
-
-    def scriptDataEscapeStartState(self):
-        data = self.stream.char()
-        if data == "-":
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
-            self.state = self.scriptDataEscapeStartDashState
-        else:
-            self.stream.unget(data)
-            self.state = self.scriptDataState
-        return True
-
-    def scriptDataEscapeStartDashState(self):
-        data = self.stream.char()
-        if data == "-":
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
-            self.state = self.scriptDataEscapedDashDashState
-        else:
-            self.stream.unget(data)
-            self.state = self.scriptDataState
-        return True
-
-    def scriptDataEscapedState(self):
-        data = self.stream.char()
-        if data == "-":
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
-            self.state = self.scriptDataEscapedDashState
-        elif data == "<":
-            self.state = self.scriptDataEscapedLessThanSignState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.tokenQueue.append({"type": tokenTypes["Characters"],
-                                    "data": "\uFFFD"})
-        elif data == EOF:
-            self.state = self.dataState
-        else:
-            chars = self.stream.charsUntil(("<", "-", "\u0000"))
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data":
-                                    data + chars})
-        return True
-
-    def scriptDataEscapedDashState(self):
-        data = self.stream.char()
-        if data == "-":
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
-            self.state = self.scriptDataEscapedDashDashState
-        elif data == "<":
-            self.state = self.scriptDataEscapedLessThanSignState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.tokenQueue.append({"type": tokenTypes["Characters"],
-                                    "data": "\uFFFD"})
-            self.state = self.scriptDataEscapedState
-        elif data == EOF:
-            self.state = self.dataState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
-            self.state = self.scriptDataEscapedState
-        return True
-
-    def scriptDataEscapedDashDashState(self):
-        data = self.stream.char()
-        if data == "-":
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
-        elif data == "<":
-            self.state = self.scriptDataEscapedLessThanSignState
-        elif data == ">":
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"})
-            self.state = self.scriptDataState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.tokenQueue.append({"type": tokenTypes["Characters"],
-                                    "data": "\uFFFD"})
-            self.state = self.scriptDataEscapedState
-        elif data == EOF:
-            self.state = self.dataState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
-            self.state = self.scriptDataEscapedState
-        return True
-
-    def scriptDataEscapedLessThanSignState(self):
-        data = self.stream.char()
-        if data == "/":
-            self.temporaryBuffer = ""
-            self.state = self.scriptDataEscapedEndTagOpenState
-        elif data in asciiLetters:
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<" + data})
-            self.temporaryBuffer = data
-            self.state = self.scriptDataDoubleEscapeStartState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
-            self.stream.unget(data)
-            self.state = self.scriptDataEscapedState
-        return True
-
-    def scriptDataEscapedEndTagOpenState(self):
-        data = self.stream.char()
-        if data in asciiLetters:
-            self.temporaryBuffer = data
-            self.state = self.scriptDataEscapedEndTagNameState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"})
-            self.stream.unget(data)
-            self.state = self.scriptDataEscapedState
-        return True
-
-    def scriptDataEscapedEndTagNameState(self):
-        appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower()
-        data = self.stream.char()
-        if data in spaceCharacters and appropriate:
-            self.currentToken = {"type": tokenTypes["EndTag"],
-                                 "name": self.temporaryBuffer,
-                                 "data": [], "selfClosing": False}
-            self.state = self.beforeAttributeNameState
-        elif data == "/" and appropriate:
-            self.currentToken = {"type": tokenTypes["EndTag"],
-                                 "name": self.temporaryBuffer,
-                                 "data": [], "selfClosing": False}
-            self.state = self.selfClosingStartTagState
-        elif data == ">" and appropriate:
-            self.currentToken = {"type": tokenTypes["EndTag"],
-                                 "name": self.temporaryBuffer,
-                                 "data": [], "selfClosing": False}
-            self.emitCurrentToken()
-            self.state = self.dataState
-        elif data in asciiLetters:
-            self.temporaryBuffer += data
-        else:
-            self.tokenQueue.append({"type": tokenTypes["Characters"],
-                                    "data": "</" + self.temporaryBuffer})
-            self.stream.unget(data)
-            self.state = self.scriptDataEscapedState
-        return True
-
-    def scriptDataDoubleEscapeStartState(self):
-        data = self.stream.char()
-        if data in (spaceCharacters | frozenset(("/", ">"))):
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
-            if self.temporaryBuffer.lower() == "script":
-                self.state = self.scriptDataDoubleEscapedState
-            else:
-                self.state = self.scriptDataEscapedState
-        elif data in asciiLetters:
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
-            self.temporaryBuffer += data
-        else:
-            self.stream.unget(data)
-            self.state = self.scriptDataEscapedState
-        return True
-
-    def scriptDataDoubleEscapedState(self):
-        data = self.stream.char()
-        if data == "-":
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
-            self.state = self.scriptDataDoubleEscapedDashState
-        elif data == "<":
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
-            self.state = self.scriptDataDoubleEscapedLessThanSignState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.tokenQueue.append({"type": tokenTypes["Characters"],
-                                    "data": "\uFFFD"})
-        elif data == EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-script-in-script"})
-            self.state = self.dataState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
-        return True
-
-    def scriptDataDoubleEscapedDashState(self):
-        data = self.stream.char()
-        if data == "-":
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
-            self.state = self.scriptDataDoubleEscapedDashDashState
-        elif data == "<":
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
-            self.state = self.scriptDataDoubleEscapedLessThanSignState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.tokenQueue.append({"type": tokenTypes["Characters"],
-                                    "data": "\uFFFD"})
-            self.state = self.scriptDataDoubleEscapedState
-        elif data == EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-script-in-script"})
-            self.state = self.dataState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
-            self.state = self.scriptDataDoubleEscapedState
-        return True
-
-    def scriptDataDoubleEscapedDashDashState(self):
-        data = self.stream.char()
-        if data == "-":
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"})
-        elif data == "<":
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"})
-            self.state = self.scriptDataDoubleEscapedLessThanSignState
-        elif data == ">":
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"})
-            self.state = self.scriptDataState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.tokenQueue.append({"type": tokenTypes["Characters"],
-                                    "data": "\uFFFD"})
-            self.state = self.scriptDataDoubleEscapedState
-        elif data == EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-script-in-script"})
-            self.state = self.dataState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
-            self.state = self.scriptDataDoubleEscapedState
-        return True
-
-    def scriptDataDoubleEscapedLessThanSignState(self):
-        data = self.stream.char()
-        if data == "/":
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "/"})
-            self.temporaryBuffer = ""
-            self.state = self.scriptDataDoubleEscapeEndState
-        else:
-            self.stream.unget(data)
-            self.state = self.scriptDataDoubleEscapedState
-        return True
-
-    def scriptDataDoubleEscapeEndState(self):
-        data = self.stream.char()
-        if data in (spaceCharacters | frozenset(("/", ">"))):
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
-            if self.temporaryBuffer.lower() == "script":
-                self.state = self.scriptDataEscapedState
-            else:
-                self.state = self.scriptDataDoubleEscapedState
-        elif data in asciiLetters:
-            self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data})
-            self.temporaryBuffer += data
-        else:
-            self.stream.unget(data)
-            self.state = self.scriptDataDoubleEscapedState
-        return True
-
-    def beforeAttributeNameState(self):
-        data = self.stream.char()
-        if data in spaceCharacters:
-            self.stream.charsUntil(spaceCharacters, True)
-        elif data in asciiLetters:
-            self.currentToken["data"].append([data, ""])
-            self.state = self.attributeNameState
-        elif data == ">":
-            self.emitCurrentToken()
-        elif data == "/":
-            self.state = self.selfClosingStartTagState
-        elif data in ("'", '"', "=", "<"):
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "invalid-character-in-attribute-name"})
-            self.currentToken["data"].append([data, ""])
-            self.state = self.attributeNameState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["data"].append(["\uFFFD", ""])
-            self.state = self.attributeNameState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "expected-attribute-name-but-got-eof"})
-            self.state = self.dataState
-        else:
-            self.currentToken["data"].append([data, ""])
-            self.state = self.attributeNameState
-        return True
-
-    def attributeNameState(self):
-        data = self.stream.char()
-        leavingThisState = True
-        emitToken = False
-        if data == "=":
-            self.state = self.beforeAttributeValueState
-        elif data in asciiLetters:
-            self.currentToken["data"][-1][0] += data +\
-                self.stream.charsUntil(asciiLetters, True)
-            leavingThisState = False
-        elif data == ">":
-            # XXX If we emit here the attributes are converted to a dict
-            # without being checked and when the code below runs we error
-            # because data is a dict not a list
-            emitToken = True
-        elif data in spaceCharacters:
-            self.state = self.afterAttributeNameState
-        elif data == "/":
-            self.state = self.selfClosingStartTagState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["data"][-1][0] += "\uFFFD"
-            leavingThisState = False
-        elif data in ("'", '"', "<"):
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data":
-                                    "invalid-character-in-attribute-name"})
-            self.currentToken["data"][-1][0] += data
-            leavingThisState = False
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "eof-in-attribute-name"})
-            self.state = self.dataState
-        else:
-            self.currentToken["data"][-1][0] += data
-            leavingThisState = False
-
-        if leavingThisState:
-            # Attributes are not dropped at this stage. That happens when the
-            # start tag token is emitted so values can still be safely appended
-            # to attributes, but we do want to report the parse error in time.
-            self.currentToken["data"][-1][0] = (
-                self.currentToken["data"][-1][0].translate(asciiUpper2Lower))
-            for name, _ in self.currentToken["data"][:-1]:
-                if self.currentToken["data"][-1][0] == name:
-                    self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                            "duplicate-attribute"})
-                    break
-            # XXX Fix for above XXX
-            if emitToken:
-                self.emitCurrentToken()
-        return True
-
-    def afterAttributeNameState(self):
-        data = self.stream.char()
-        if data in spaceCharacters:
-            self.stream.charsUntil(spaceCharacters, True)
-        elif data == "=":
-            self.state = self.beforeAttributeValueState
-        elif data == ">":
-            self.emitCurrentToken()
-        elif data in asciiLetters:
-            self.currentToken["data"].append([data, ""])
-            self.state = self.attributeNameState
-        elif data == "/":
-            self.state = self.selfClosingStartTagState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["data"].append(["\uFFFD", ""])
-            self.state = self.attributeNameState
-        elif data in ("'", '"', "<"):
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "invalid-character-after-attribute-name"})
-            self.currentToken["data"].append([data, ""])
-            self.state = self.attributeNameState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "expected-end-of-tag-but-got-eof"})
-            self.state = self.dataState
-        else:
-            self.currentToken["data"].append([data, ""])
-            self.state = self.attributeNameState
-        return True
-
-    def beforeAttributeValueState(self):
-        data = self.stream.char()
-        if data in spaceCharacters:
-            self.stream.charsUntil(spaceCharacters, True)
-        elif data == "\"":
-            self.state = self.attributeValueDoubleQuotedState
-        elif data == "&":
-            self.state = self.attributeValueUnQuotedState
-            self.stream.unget(data)
-        elif data == "'":
-            self.state = self.attributeValueSingleQuotedState
-        elif data == ">":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "expected-attribute-value-but-got-right-bracket"})
-            self.emitCurrentToken()
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["data"][-1][1] += "\uFFFD"
-            self.state = self.attributeValueUnQuotedState
-        elif data in ("=", "<", "`"):
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "equals-in-unquoted-attribute-value"})
-            self.currentToken["data"][-1][1] += data
-            self.state = self.attributeValueUnQuotedState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "expected-attribute-value-but-got-eof"})
-            self.state = self.dataState
-        else:
-            self.currentToken["data"][-1][1] += data
-            self.state = self.attributeValueUnQuotedState
-        return True
-
-    def attributeValueDoubleQuotedState(self):
-        data = self.stream.char()
-        if data == "\"":
-            self.state = self.afterAttributeValueState
-        elif data == "&":
-            self.processEntityInAttribute('"')
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["data"][-1][1] += "\uFFFD"
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-attribute-value-double-quote"})
-            self.state = self.dataState
-        else:
-            self.currentToken["data"][-1][1] += data +\
-                self.stream.charsUntil(("\"", "&", "\u0000"))
-        return True
-
-    def attributeValueSingleQuotedState(self):
-        data = self.stream.char()
-        if data == "'":
-            self.state = self.afterAttributeValueState
-        elif data == "&":
-            self.processEntityInAttribute("'")
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["data"][-1][1] += "\uFFFD"
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-attribute-value-single-quote"})
-            self.state = self.dataState
-        else:
-            self.currentToken["data"][-1][1] += data +\
-                self.stream.charsUntil(("'", "&", "\u0000"))
-        return True
-
-    def attributeValueUnQuotedState(self):
-        data = self.stream.char()
-        if data in spaceCharacters:
-            self.state = self.beforeAttributeNameState
-        elif data == "&":
-            self.processEntityInAttribute(">")
-        elif data == ">":
-            self.emitCurrentToken()
-        elif data in ('"', "'", "=", "<", "`"):
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-character-in-unquoted-attribute-value"})
-            self.currentToken["data"][-1][1] += data
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["data"][-1][1] += "\uFFFD"
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-attribute-value-no-quotes"})
-            self.state = self.dataState
-        else:
-            self.currentToken["data"][-1][1] += data + self.stream.charsUntil(
-                frozenset(("&", ">", '"', "'", "=", "<", "`", "\u0000")) | spaceCharacters)
-        return True
-
-    def afterAttributeValueState(self):
-        data = self.stream.char()
-        if data in spaceCharacters:
-            self.state = self.beforeAttributeNameState
-        elif data == ">":
-            self.emitCurrentToken()
-        elif data == "/":
-            self.state = self.selfClosingStartTagState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-EOF-after-attribute-value"})
-            self.stream.unget(data)
-            self.state = self.dataState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-character-after-attribute-value"})
-            self.stream.unget(data)
-            self.state = self.beforeAttributeNameState
-        return True
-
-    def selfClosingStartTagState(self):
-        data = self.stream.char()
-        if data == ">":
-            self.currentToken["selfClosing"] = True
-            self.emitCurrentToken()
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data":
-                                    "unexpected-EOF-after-solidus-in-tag"})
-            self.stream.unget(data)
-            self.state = self.dataState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-character-after-solidus-in-tag"})
-            self.stream.unget(data)
-            self.state = self.beforeAttributeNameState
-        return True
-
-    def bogusCommentState(self):
-        # Make a new comment token and give it as value all the characters
-        # until the first > or EOF (charsUntil checks for EOF automatically)
-        # and emit it.
-        data = self.stream.charsUntil(">")
-        data = data.replace("\u0000", "\uFFFD")
-        self.tokenQueue.append(
-            {"type": tokenTypes["Comment"], "data": data})
-
-        # Eat the character directly after the bogus comment which is either a
-        # ">" or an EOF.
-        self.stream.char()
-        self.state = self.dataState
-        return True
-
-    def markupDeclarationOpenState(self):
-        charStack = [self.stream.char()]
-        if charStack[-1] == "-":
-            charStack.append(self.stream.char())
-            if charStack[-1] == "-":
-                self.currentToken = {"type": tokenTypes["Comment"], "data": ""}
-                self.state = self.commentStartState
-                return True
-        elif charStack[-1] in ('d', 'D'):
-            matched = True
-            for expected in (('o', 'O'), ('c', 'C'), ('t', 'T'),
-                             ('y', 'Y'), ('p', 'P'), ('e', 'E')):
-                charStack.append(self.stream.char())
-                if charStack[-1] not in expected:
-                    matched = False
-                    break
-            if matched:
-                self.currentToken = {"type": tokenTypes["Doctype"],
-                                     "name": "",
-                                     "publicId": None, "systemId": None,
-                                     "correct": True}
-                self.state = self.doctypeState
-                return True
-        elif (charStack[-1] == "[" and
-              self.parser is not None and
-              self.parser.tree.openElements and
-              self.parser.tree.openElements[-1].namespace != self.parser.tree.defaultNamespace):
-            matched = True
-            for expected in ["C", "D", "A", "T", "A", "["]:
-                charStack.append(self.stream.char())
-                if charStack[-1] != expected:
-                    matched = False
-                    break
-            if matched:
-                self.state = self.cdataSectionState
-                return True
-
-        self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                "expected-dashes-or-doctype"})
-
-        while charStack:
-            self.stream.unget(charStack.pop())
-        self.state = self.bogusCommentState
-        return True
-
-    def commentStartState(self):
-        data = self.stream.char()
-        if data == "-":
-            self.state = self.commentStartDashState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["data"] += "\uFFFD"
-        elif data == ">":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "incorrect-comment"})
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-comment"})
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            self.currentToken["data"] += data
-            self.state = self.commentState
-        return True
-
-    def commentStartDashState(self):
-        data = self.stream.char()
-        if data == "-":
-            self.state = self.commentEndState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["data"] += "-\uFFFD"
-        elif data == ">":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "incorrect-comment"})
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-comment"})
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            self.currentToken["data"] += "-" + data
-            self.state = self.commentState
-        return True
-
-    def commentState(self):
-        data = self.stream.char()
-        if data == "-":
-            self.state = self.commentEndDashState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["data"] += "\uFFFD"
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "eof-in-comment"})
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            self.currentToken["data"] += data + \
-                self.stream.charsUntil(("-", "\u0000"))
-        return True
-
-    def commentEndDashState(self):
-        data = self.stream.char()
-        if data == "-":
-            self.state = self.commentEndState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["data"] += "-\uFFFD"
-            self.state = self.commentState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-comment-end-dash"})
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            self.currentToken["data"] += "-" + data
-            self.state = self.commentState
-        return True
-
-    def commentEndState(self):
-        data = self.stream.char()
-        if data == ">":
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["data"] += "--\uFFFD"
-            self.state = self.commentState
-        elif data == "!":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-bang-after-double-dash-in-comment"})
-            self.state = self.commentEndBangState
-        elif data == "-":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-dash-after-double-dash-in-comment"})
-            self.currentToken["data"] += data
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-comment-double-dash"})
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            # XXX
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-char-in-comment"})
-            self.currentToken["data"] += "--" + data
-            self.state = self.commentState
-        return True
-
-    def commentEndBangState(self):
-        data = self.stream.char()
-        if data == ">":
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        elif data == "-":
-            self.currentToken["data"] += "--!"
-            self.state = self.commentEndDashState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["data"] += "--!\uFFFD"
-            self.state = self.commentState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-comment-end-bang-state"})
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            self.currentToken["data"] += "--!" + data
-            self.state = self.commentState
-        return True
-
-    def doctypeState(self):
-        data = self.stream.char()
-        if data in spaceCharacters:
-            self.state = self.beforeDoctypeNameState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "expected-doctype-name-but-got-eof"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "need-space-after-doctype"})
-            self.stream.unget(data)
-            self.state = self.beforeDoctypeNameState
-        return True
-
-    def beforeDoctypeNameState(self):
-        data = self.stream.char()
-        if data in spaceCharacters:
-            pass
-        elif data == ">":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "expected-doctype-name-but-got-right-bracket"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["name"] = "\uFFFD"
-            self.state = self.doctypeNameState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "expected-doctype-name-but-got-eof"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            self.currentToken["name"] = data
-            self.state = self.doctypeNameState
-        return True
-
-    def doctypeNameState(self):
-        data = self.stream.char()
-        if data in spaceCharacters:
-            self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower)
-            self.state = self.afterDoctypeNameState
-        elif data == ">":
-            self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower)
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["name"] += "\uFFFD"
-            self.state = self.doctypeNameState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-doctype-name"})
-            self.currentToken["correct"] = False
-            self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower)
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            self.currentToken["name"] += data
-        return True
-
-    def afterDoctypeNameState(self):
-        data = self.stream.char()
-        if data in spaceCharacters:
-            pass
-        elif data == ">":
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        elif data is EOF:
-            self.currentToken["correct"] = False
-            self.stream.unget(data)
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-doctype"})
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            if data in ("p", "P"):
-                matched = True
-                for expected in (("u", "U"), ("b", "B"), ("l", "L"),
-                                 ("i", "I"), ("c", "C")):
-                    data = self.stream.char()
-                    if data not in expected:
-                        matched = False
-                        break
-                if matched:
-                    self.state = self.afterDoctypePublicKeywordState
-                    return True
-            elif data in ("s", "S"):
-                matched = True
-                for expected in (("y", "Y"), ("s", "S"), ("t", "T"),
-                                 ("e", "E"), ("m", "M")):
-                    data = self.stream.char()
-                    if data not in expected:
-                        matched = False
-                        break
-                if matched:
-                    self.state = self.afterDoctypeSystemKeywordState
-                    return True
-
-            # All the characters read before the current 'data' will be
-            # [a-zA-Z], so they're garbage in the bogus doctype and can be
-            # discarded; only the latest character might be '>' or EOF
-            # and needs to be ungetted
-            self.stream.unget(data)
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "expected-space-or-right-bracket-in-doctype", "datavars":
-                                    {"data": data}})
-            self.currentToken["correct"] = False
-            self.state = self.bogusDoctypeState
-
-        return True
-
-    def afterDoctypePublicKeywordState(self):
-        data = self.stream.char()
-        if data in spaceCharacters:
-            self.state = self.beforeDoctypePublicIdentifierState
-        elif data in ("'", '"'):
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-char-in-doctype"})
-            self.stream.unget(data)
-            self.state = self.beforeDoctypePublicIdentifierState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-doctype"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            self.stream.unget(data)
-            self.state = self.beforeDoctypePublicIdentifierState
-        return True
-
-    def beforeDoctypePublicIdentifierState(self):
-        data = self.stream.char()
-        if data in spaceCharacters:
-            pass
-        elif data == "\"":
-            self.currentToken["publicId"] = ""
-            self.state = self.doctypePublicIdentifierDoubleQuotedState
-        elif data == "'":
-            self.currentToken["publicId"] = ""
-            self.state = self.doctypePublicIdentifierSingleQuotedState
-        elif data == ">":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-end-of-doctype"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-doctype"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-char-in-doctype"})
-            self.currentToken["correct"] = False
-            self.state = self.bogusDoctypeState
-        return True
-
-    def doctypePublicIdentifierDoubleQuotedState(self):
-        data = self.stream.char()
-        if data == "\"":
-            self.state = self.afterDoctypePublicIdentifierState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["publicId"] += "\uFFFD"
-        elif data == ">":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-end-of-doctype"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-doctype"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            self.currentToken["publicId"] += data
-        return True
-
-    def doctypePublicIdentifierSingleQuotedState(self):
-        data = self.stream.char()
-        if data == "'":
-            self.state = self.afterDoctypePublicIdentifierState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["publicId"] += "\uFFFD"
-        elif data == ">":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-end-of-doctype"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-doctype"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            self.currentToken["publicId"] += data
-        return True
-
-    def afterDoctypePublicIdentifierState(self):
-        data = self.stream.char()
-        if data in spaceCharacters:
-            self.state = self.betweenDoctypePublicAndSystemIdentifiersState
-        elif data == ">":
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        elif data == '"':
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-char-in-doctype"})
-            self.currentToken["systemId"] = ""
-            self.state = self.doctypeSystemIdentifierDoubleQuotedState
-        elif data == "'":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-char-in-doctype"})
-            self.currentToken["systemId"] = ""
-            self.state = self.doctypeSystemIdentifierSingleQuotedState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-doctype"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-char-in-doctype"})
-            self.currentToken["correct"] = False
-            self.state = self.bogusDoctypeState
-        return True
-
-    def betweenDoctypePublicAndSystemIdentifiersState(self):
-        data = self.stream.char()
-        if data in spaceCharacters:
-            pass
-        elif data == ">":
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        elif data == '"':
-            self.currentToken["systemId"] = ""
-            self.state = self.doctypeSystemIdentifierDoubleQuotedState
-        elif data == "'":
-            self.currentToken["systemId"] = ""
-            self.state = self.doctypeSystemIdentifierSingleQuotedState
-        elif data == EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-doctype"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-char-in-doctype"})
-            self.currentToken["correct"] = False
-            self.state = self.bogusDoctypeState
-        return True
-
-    def afterDoctypeSystemKeywordState(self):
-        data = self.stream.char()
-        if data in spaceCharacters:
-            self.state = self.beforeDoctypeSystemIdentifierState
-        elif data in ("'", '"'):
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-char-in-doctype"})
-            self.stream.unget(data)
-            self.state = self.beforeDoctypeSystemIdentifierState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-doctype"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            self.stream.unget(data)
-            self.state = self.beforeDoctypeSystemIdentifierState
-        return True
-
-    def beforeDoctypeSystemIdentifierState(self):
-        data = self.stream.char()
-        if data in spaceCharacters:
-            pass
-        elif data == "\"":
-            self.currentToken["systemId"] = ""
-            self.state = self.doctypeSystemIdentifierDoubleQuotedState
-        elif data == "'":
-            self.currentToken["systemId"] = ""
-            self.state = self.doctypeSystemIdentifierSingleQuotedState
-        elif data == ">":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-char-in-doctype"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-doctype"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-char-in-doctype"})
-            self.currentToken["correct"] = False
-            self.state = self.bogusDoctypeState
-        return True
-
-    def doctypeSystemIdentifierDoubleQuotedState(self):
-        data = self.stream.char()
-        if data == "\"":
-            self.state = self.afterDoctypeSystemIdentifierState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["systemId"] += "\uFFFD"
-        elif data == ">":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-end-of-doctype"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-doctype"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            self.currentToken["systemId"] += data
-        return True
-
-    def doctypeSystemIdentifierSingleQuotedState(self):
-        data = self.stream.char()
-        if data == "'":
-            self.state = self.afterDoctypeSystemIdentifierState
-        elif data == "\u0000":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                    "data": "invalid-codepoint"})
-            self.currentToken["systemId"] += "\uFFFD"
-        elif data == ">":
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-end-of-doctype"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-doctype"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            self.currentToken["systemId"] += data
-        return True
-
-    def afterDoctypeSystemIdentifierState(self):
-        data = self.stream.char()
-        if data in spaceCharacters:
-            pass
-        elif data == ">":
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        elif data is EOF:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "eof-in-doctype"})
-            self.currentToken["correct"] = False
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            self.tokenQueue.append({"type": tokenTypes["ParseError"], "data":
-                                    "unexpected-char-in-doctype"})
-            self.state = self.bogusDoctypeState
-        return True
-
-    def bogusDoctypeState(self):
-        data = self.stream.char()
-        if data == ">":
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        elif data is EOF:
-            # XXX EMIT
-            self.stream.unget(data)
-            self.tokenQueue.append(self.currentToken)
-            self.state = self.dataState
-        else:
-            pass
-        return True
-
-    def cdataSectionState(self):
-        data = []
-        while True:
-            data.append(self.stream.charsUntil("]"))
-            data.append(self.stream.charsUntil(">"))
-            char = self.stream.char()
-            if char == EOF:
-                break
-            else:
-                assert char == ">"
-                if data[-1][-2:] == "]]":
-                    data[-1] = data[-1][:-2]
-                    break
-                else:
-                    data.append(char)
-
-        data = "".join(data)  # pylint:disable=redefined-variable-type
-        # Deal with null here rather than in the parser
-        nullCount = data.count("\u0000")
-        if nullCount > 0:
-            for _ in range(nullCount):
-                self.tokenQueue.append({"type": tokenTypes["ParseError"],
-                                        "data": "invalid-codepoint"})
-            data = data.replace("\u0000", "\uFFFD")
-        if data:
-            self.tokenQueue.append({"type": tokenTypes["Characters"],
-                                    "data": data})
-        self.state = self.dataState
-        return True
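
The tokenizer states deleted above all follow one dispatch pattern: each `...State` method pulls a character (or a run of characters) from the input stream, queues ParseError/token dicts on `self.tokenQueue`, points `self.state` at the next handler, and returns True so the outer loop keeps pumping. A minimal, self-contained sketch of that pattern follows; the two-state `TinyTokenizer` and its token names are illustrative, not html5lib's API.

EOF = None

class TinyTokenizer:
    """Toy state machine in the style of the deleted html5lib tokenizer."""

    def __init__(self, text):
        self._chars = iter(text)
        self.tokenQueue = []
        self.state = self.dataState        # current handler; states swap it

    def _char(self):
        return next(self._chars, EOF)

    def dataState(self):
        data = self._char()
        if data is EOF:
            return False                   # stop the pump
        elif data == "<":
            self.state = self.tagState
        else:
            self.tokenQueue.append({"type": "Characters", "data": data})
        return True

    def tagState(self):
        data = self._char()
        if data is EOF:
            self.tokenQueue.append({"type": "ParseError", "data": "eof-in-tag"})
            return False
        elif data == ">":
            self.state = self.dataState
        else:
            self.tokenQueue.append({"type": "TagChar", "data": data})
        return True

    def run(self):
        while self.state():                # pump until a state returns False
            pass
        return self.tokenQueue

print(TinyTokenizer("a<b>c").run())
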
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__init__.py
deleted file mode 100644
index a5ba4bf..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from __future__ import absolute_import, division, unicode_literals
-
-from .py import Trie as PyTrie
-
-Trie = PyTrie
-
-# pylint:disable=wrong-import-position
-try:
-    from .datrie import Trie as DATrie
-except ImportError:
-    pass
-else:
-    Trie = DATrie
-# pylint:enable=wrong-import-position
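
The deleted `_trie/__init__.py` selects an implementation at import time: it binds the pure-Python `Trie`, then rebinds to the `datrie`-backed class only if that optional dependency imports. The same try/except-ImportError fallback, sketched with a stand-in optional package (`fast_json_backend` is a hypothetical name, used only so the fallback path actually runs):

from json import dumps as _dumps_py

dumps = _dumps_py                          # default: always-available stdlib path

try:
    from fast_json_backend import dumps as _dumps_fast   # hypothetical accelerator
except ImportError:
    pass                                   # optional backend missing: keep default
else:
    dumps = _dumps_fast                    # optional backend present: prefer it

print(dumps({"accelerated": dumps is not _dumps_py}))
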
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/_base.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/_base.py
deleted file mode 100644
index 6b71975..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/_base.py
+++ /dev/null
@@ -1,40 +0,0 @@
-from __future__ import absolute_import, division, unicode_literals
-
-try:
-    from collections.abc import Mapping
-except ImportError:  # Python 2.7
-    from collections import Mapping
-
-
-class Trie(Mapping):
-    """Abstract base class for tries"""
-
-    def keys(self, prefix=None):
-        # pylint:disable=arguments-differ
-        keys = super(Trie, self).keys()
-
-        if prefix is None:
-            return set(keys)
-
-        return {x for x in keys if x.startswith(prefix)}
-
-    def has_keys_with_prefix(self, prefix):
-        for key in self.keys():
-            if key.startswith(prefix):
-                return True
-
-        return False
-
-    def longest_prefix(self, prefix):
-        if prefix in self:
-            return prefix
-
-        for i in range(1, len(prefix) + 1):
-            if prefix[:-i] in self:
-                return prefix[:-i]
-
-        raise KeyError(prefix)
-
-    def longest_prefix_item(self, prefix):
-        lprefix = self.longest_prefix(prefix)
-        return (lprefix, self[lprefix])
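
The abstract `Trie` above derives `longest_prefix` from nothing but `__contains__`: try the full prefix, then progressively shorter slices, and raise KeyError when nothing matches. The same logic, run standalone against a plain dict as the mapping:

def longest_prefix(mapping, prefix):
    # Same scan as the deleted _base.py: full prefix first, then shrink it.
    if prefix in mapping:
        return prefix
    for i in range(1, len(prefix) + 1):
        if prefix[:-i] in mapping:
            return prefix[:-i]
    raise KeyError(prefix)

entities = {"am": 1, "amp": 2, "and": 3}
print(longest_prefix(entities, "amp;"))    # -> 'amp'
print(longest_prefix(entities, "ambush"))  # -> 'am'
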
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/datrie.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/datrie.py
deleted file mode 100644
index e2e5f86..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/datrie.py
+++ /dev/null
@@ -1,44 +0,0 @@
-from __future__ import absolute_import, division, unicode_literals
-
-from datrie import Trie as DATrie
-from pip._vendor.six import text_type
-
-from ._base import Trie as ABCTrie
-
-
-class Trie(ABCTrie):
-    def __init__(self, data):
-        chars = set()
-        for key in data.keys():
-            if not isinstance(key, text_type):
-                raise TypeError("All keys must be strings")
-            for char in key:
-                chars.add(char)
-
-        self._data = DATrie("".join(chars))
-        for key, value in data.items():
-            self._data[key] = value
-
-    def __contains__(self, key):
-        return key in self._data
-
-    def __len__(self):
-        return len(self._data)
-
-    def __iter__(self):
-        raise NotImplementedError()
-
-    def __getitem__(self, key):
-        return self._data[key]
-
-    def keys(self, prefix=None):
-        return self._data.keys(prefix)
-
-    def has_keys_with_prefix(self, prefix):
-        return self._data.has_keys_with_prefix(prefix)
-
-    def longest_prefix(self, prefix):
-        return self._data.longest_prefix(prefix)
-
-    def longest_prefix_item(self, prefix):
-        return self._data.longest_prefix_item(prefix)
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/py.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/py.py
deleted file mode 100644
index c178b21..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_trie/py.py
+++ /dev/null
@@ -1,67 +0,0 @@
-from __future__ import absolute_import, division, unicode_literals
-from pip._vendor.six import text_type
-
-from bisect import bisect_left
-
-from ._base import Trie as ABCTrie
-
-
-class Trie(ABCTrie):
-    def __init__(self, data):
-        if not all(isinstance(x, text_type) for x in data.keys()):
-            raise TypeError("All keys must be strings")
-
-        self._data = data
-        self._keys = sorted(data.keys())
-        self._cachestr = ""
-        self._cachepoints = (0, len(data))
-
-    def __contains__(self, key):
-        return key in self._data
-
-    def __len__(self):
-        return len(self._data)
-
-    def __iter__(self):
-        return iter(self._data)
-
-    def __getitem__(self, key):
-        return self._data[key]
-
-    def keys(self, prefix=None):
-        if prefix is None or prefix == "" or not self._keys:
-            return set(self._keys)
-
-        if prefix.startswith(self._cachestr):
-            lo, hi = self._cachepoints
-            start = i = bisect_left(self._keys, prefix, lo, hi)
-        else:
-            start = i = bisect_left(self._keys, prefix)
-
-        keys = set()
-        if start == len(self._keys):
-            return keys
-
-        while self._keys[i].startswith(prefix):
-            keys.add(self._keys[i])
-            i += 1
-
-        self._cachestr = prefix
-        self._cachepoints = (start, i)
-
-        return keys
-
-    def has_keys_with_prefix(self, prefix):
-        if prefix in self._data:
-            return True
-
-        if prefix.startswith(self._cachestr):
-            lo, hi = self._cachepoints
-            i = bisect_left(self._keys, prefix, lo, hi)
-        else:
-            i = bisect_left(self._keys, prefix)
-
-        if i == len(self._keys):
-            return False
-
-        return self._keys[i].startswith(prefix)
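
The deleted `_trie/py.py` is not a linked trie at all: it keeps the keys in a sorted list, uses `bisect_left` to find where the prefix would sort, then scans forward while keys still start with that prefix, caching the resulting window so repeated lookups under the same prefix get a narrower search range. The core lookup, sketched as a plain function with the caching left out and an explicit bounds check added for standalone use:

from bisect import bisect_left

def keys_with_prefix(sorted_keys, prefix):
    # First index whose key sorts >= prefix; every match must start here.
    i = bisect_left(sorted_keys, prefix)
    found = set()
    while i < len(sorted_keys) and sorted_keys[i].startswith(prefix):
        found.add(sorted_keys[i])
        i += 1
    return found

keys = sorted(["amp", "amp;", "and;", "gt;", "lt;"])
print(keys_with_prefix(keys, "am"))   # -> {'amp', 'amp;'}
print(keys_with_prefix(keys, "z"))    # -> set()
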
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_utils.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_utils.py
deleted file mode 100644
index 0703afb..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/_utils.py
+++ /dev/null
@@ -1,124 +0,0 @@
-from __future__ import absolute_import, division, unicode_literals
-
-from types import ModuleType
-
-from pip._vendor.six import text_type
-
-try:
-    import xml.etree.cElementTree as default_etree
-except ImportError:
-    import xml.etree.ElementTree as default_etree
-
-
-__all__ = ["default_etree", "MethodDispatcher", "isSurrogatePair",
-           "surrogatePairToCodepoint", "moduleFactoryFactory",
-           "supports_lone_surrogates"]
-
-
-# Platforms not supporting lone surrogates (\uD800-\uDFFF) should be
-# caught by the below test. In general this would be any platform
-# using UTF-16 as its encoding of unicode strings, such as
-# Jython. This is because UTF-16 itself is based on the use of such
-# surrogates, and there is no mechanism to further escape such
-# escapes.
-try:
-    _x = eval('"\\uD800"')  # pylint:disable=eval-used
-    if not isinstance(_x, text_type):
-        # We need this with u"" because of http://bugs.jython.org/issue2039
-        _x = eval('u"\\uD800"')  # pylint:disable=eval-used
-        assert isinstance(_x, text_type)
-except:  # pylint:disable=bare-except
-    supports_lone_surrogates = False
-else:
-    supports_lone_surrogates = True
-
-
-class MethodDispatcher(dict):
-    """Dict with 2 special properties:
-
-    On initiation, keys that are lists, sets or tuples are converted to
-    multiple keys so accessing any one of the items in the original
-    list-like object returns the matching value
-
-    md = MethodDispatcher({("foo", "bar"):"baz"})
-    md["foo"] == "baz"
-
-    A default value which can be set through the default attribute.
-    """
-
-    def __init__(self, items=()):
-        # Using _dictEntries instead of directly assigning to self is about
-        # twice as fast. Please do careful performance testing before changing
-        # anything here.
-        _dictEntries = []
-        for name, value in items:
-            if isinstance(name, (list, tuple, frozenset, set)):
-                for item in name:
-                    _dictEntries.append((item, value))
-            else:
-                _dictEntries.append((name, value))
-        dict.__init__(self, _dictEntries)
-        assert len(self) == len(_dictEntries)
-        self.default = None
-
-    def __getitem__(self, key):
-        return dict.get(self, key, self.default)
-
-
-# Some utility functions to deal with weirdness around UCS2 vs UCS4
-# python builds
-
-def isSurrogatePair(data):
-    return (len(data) == 2 and
-            ord(data[0]) >= 0xD800 and ord(data[0]) <= 0xDBFF and
-            ord(data[1]) >= 0xDC00 and ord(data[1]) <= 0xDFFF)
-
-
-def surrogatePairToCodepoint(data):
-    char_val = (0x10000 + (ord(data[0]) - 0xD800) * 0x400 +
-                (ord(data[1]) - 0xDC00))
-    return char_val
-
-# Module Factory Factory (no, this isn't Java, I know)
-# Here to stop this being duplicated all over the place.
-
-
-def moduleFactoryFactory(factory):
-    moduleCache = {}
-
-    def moduleFactory(baseModule, *args, **kwargs):
-        if isinstance(ModuleType.__name__, type("")):
-            name = "_%s_factory" % baseModule.__name__
-        else:
-            name = b"_%s_factory" % baseModule.__name__
-
-        kwargs_tuple = tuple(kwargs.items())
-
-        try:
-            return moduleCache[name][args][kwargs_tuple]
-        except KeyError:
-            mod = ModuleType(name)
-            objs = factory(baseModule, *args, **kwargs)
-            mod.__dict__.update(objs)
-            if "name" not in moduleCache:
-                moduleCache[name] = {}
-            if "args" not in moduleCache[name]:
-                moduleCache[name][args] = {}
-            if "kwargs" not in moduleCache[name][args]:
-                moduleCache[name][args][kwargs_tuple] = {}
-            moduleCache[name][args][kwargs_tuple] = mod
-            return mod
-
-    return moduleFactory
-
-
-def memoize(func):
-    cache = {}
-
-    def wrapped(*args, **kwargs):
-        key = (tuple(args), tuple(kwargs.items()))
-        if key not in cache:
-            cache[key] = func(*args, **kwargs)
-        return cache[key]
-
-    return wrapped
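
The deleted `_utils.py` closes with a plain dictionary memoizer: the wrapper keys its cache on `(args, kwargs.items())` and only invokes the wrapped function on a miss, so arguments must be hashable and a different keyword order yields a distinct cache entry. A short standalone usage sketch of that decorator pattern:

def memoize(func):
    cache = {}

    def wrapped(*args, **kwargs):
        key = (tuple(args), tuple(kwargs.items()))
        if key not in cache:
            cache[key] = func(*args, **kwargs)   # miss: compute and store
        return cache[key]                        # hit or freshly stored value

    return wrapped

calls = []

@memoize
def slow_square(n):
    calls.append(n)           # record real invocations to show the caching
    return n * n

print(slow_square(4), slow_square(4))   # 16 16
print(calls)                            # [4] -- second call came from the cache
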
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/constants.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/constants.py
deleted file mode 100644
index 1ff8041..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/constants.py
+++ /dev/null
@@ -1,2947 +0,0 @@
-from __future__ import absolute_import, division, unicode_literals
-
-import string
-
-EOF = None
-
-E = {
-    "null-character":
-        "Null character in input stream, replaced with U+FFFD.",
-    "invalid-codepoint":
-        "Invalid codepoint in stream.",
-    "incorrectly-placed-solidus":
-        "Solidus (/) incorrectly placed in tag.",
-    "incorrect-cr-newline-entity":
-        "Incorrect CR newline entity, replaced with LF.",
-    "illegal-windows-1252-entity":
-        "Entity used with illegal number (windows-1252 reference).",
-    "cant-convert-numeric-entity":
-        "Numeric entity couldn't be converted to character "
-        "(codepoint U+%(charAsInt)08x).",
-    "illegal-codepoint-for-numeric-entity":
-        "Numeric entity represents an illegal codepoint: "
-        "U+%(charAsInt)08x.",
-    "numeric-entity-without-semicolon":
-        "Numeric entity didn't end with ';'.",
-    "expected-numeric-entity-but-got-eof":
-        "Numeric entity expected. Got end of file instead.",
-    "expected-numeric-entity":
-        "Numeric entity expected but none found.",
-    "named-entity-without-semicolon":
-        "Named entity didn't end with ';'.",
-    "expected-named-entity":
-        "Named entity expected. Got none.",
-    "attributes-in-end-tag":
-        "End tag contains unexpected attributes.",
-    'self-closing-flag-on-end-tag':
-        "End tag contains unexpected self-closing flag.",
-    "expected-tag-name-but-got-right-bracket":
-        "Expected tag name. Got '>' instead.",
-    "expected-tag-name-but-got-question-mark":
-        "Expected tag name. Got '?' instead. (HTML doesn't "
-        "support processing instructions.)",
-    "expected-tag-name":
-        "Expected tag name. Got something else instead",
-    "expected-closing-tag-but-got-right-bracket":
-        "Expected closing tag. Got '>' instead. Ignoring ''.",
-    "expected-closing-tag-but-got-eof":
-        "Expected closing tag. Unexpected end of file.",
-    "expected-closing-tag-but-got-char":
-        "Expected closing tag. Unexpected character '%(data)s' found.",
-    "eof-in-tag-name":
-        "Unexpected end of file in the tag name.",
-    "expected-attribute-name-but-got-eof":
-        "Unexpected end of file. Expected attribute name instead.",
-    "eof-in-attribute-name":
-        "Unexpected end of file in attribute name.",
-    "invalid-character-in-attribute-name":
-        "Invalid character in attribute name",
-    "duplicate-attribute":
-        "Dropped duplicate attribute on tag.",
-    "expected-end-of-tag-name-but-got-eof":
-        "Unexpected end of file. Expected = or end of tag.",
-    "expected-attribute-value-but-got-eof":
-        "Unexpected end of file. Expected attribute value.",
-    "expected-attribute-value-but-got-right-bracket":
-        "Expected attribute value. Got '>' instead.",
-    'equals-in-unquoted-attribute-value':
-        "Unexpected = in unquoted attribute",
-    'unexpected-character-in-unquoted-attribute-value':
-        "Unexpected character in unquoted attribute",
-    "invalid-character-after-attribute-name":
-        "Unexpected character after attribute name.",
-    "unexpected-character-after-attribute-value":
-        "Unexpected character after attribute value.",
-    "eof-in-attribute-value-double-quote":
-        "Unexpected end of file in attribute value (\").",
-    "eof-in-attribute-value-single-quote":
-        "Unexpected end of file in attribute value (').",
-    "eof-in-attribute-value-no-quotes":
-        "Unexpected end of file in attribute value.",
-    "unexpected-EOF-after-solidus-in-tag":
-        "Unexpected end of file in tag. Expected >",
-    "unexpected-character-after-solidus-in-tag":
-        "Unexpected character after / in tag. Expected >",
-    "expected-dashes-or-doctype":
-        "Expected '--' or 'DOCTYPE'. Not found.",
-    "unexpected-bang-after-double-dash-in-comment":
-        "Unexpected ! after -- in comment",
-    "unexpected-space-after-double-dash-in-comment":
-        "Unexpected space after -- in comment",
-    "incorrect-comment":
-        "Incorrect comment.",
-    "eof-in-comment":
-        "Unexpected end of file in comment.",
-    "eof-in-comment-end-dash":
-        "Unexpected end of file in comment (-)",
-    "unexpected-dash-after-double-dash-in-comment":
-        "Unexpected '-' after '--' found in comment.",
-    "eof-in-comment-double-dash":
-        "Unexpected end of file in comment (--).",
-    "eof-in-comment-end-space-state":
-        "Unexpected end of file in comment.",
-    "eof-in-comment-end-bang-state":
-        "Unexpected end of file in comment.",
-    "unexpected-char-in-comment":
-        "Unexpected character in comment found.",
-    "need-space-after-doctype":
-        "No space after literal string 'DOCTYPE'.",
-    "expected-doctype-name-but-got-right-bracket":
-        "Unexpected > character. Expected DOCTYPE name.",
-    "expected-doctype-name-but-got-eof":
-        "Unexpected end of file. Expected DOCTYPE name.",
-    "eof-in-doctype-name":
-        "Unexpected end of file in DOCTYPE name.",
-    "eof-in-doctype":
-        "Unexpected end of file in DOCTYPE.",
-    "expected-space-or-right-bracket-in-doctype":
-        "Expected space or '>'. Got '%(data)s'",
-    "unexpected-end-of-doctype":
-        "Unexpected end of DOCTYPE.",
-    "unexpected-char-in-doctype":
-        "Unexpected character in DOCTYPE.",
-    "eof-in-innerhtml":
-        "XXX innerHTML EOF",
-    "unexpected-doctype":
-        "Unexpected DOCTYPE. Ignored.",
-    "non-html-root":
-        "html needs to be the first start tag.",
-    "expected-doctype-but-got-eof":
-        "Unexpected End of file. Expected DOCTYPE.",
-    "unknown-doctype":
-        "Erroneous DOCTYPE.",
-    "expected-doctype-but-got-chars":
-        "Unexpected non-space characters. Expected DOCTYPE.",
-    "expected-doctype-but-got-start-tag":
-        "Unexpected start tag (%(name)s). Expected DOCTYPE.",
-    "expected-doctype-but-got-end-tag":
-        "Unexpected end tag (%(name)s). Expected DOCTYPE.",
-    "end-tag-after-implied-root":
-        "Unexpected end tag (%(name)s) after the (implied) root element.",
-    "expected-named-closing-tag-but-got-eof":
-        "Unexpected end of file. Expected end tag (%(name)s).",
-    "two-heads-are-not-better-than-one":
-        "Unexpected start tag head in existing head. Ignored.",
-    "unexpected-end-tag":
-        "Unexpected end tag (%(name)s). Ignored.",
-    "unexpected-start-tag-out-of-my-head":
-        "Unexpected start tag (%(name)s) that can be in head. Moved.",
-    "unexpected-start-tag":
-        "Unexpected start tag (%(name)s).",
-    "missing-end-tag":
-        "Missing end tag (%(name)s).",
-    "missing-end-tags":
-        "Missing end tags (%(name)s).",
-    "unexpected-start-tag-implies-end-tag":
-        "Unexpected start tag (%(startName)s) "
-        "implies end tag (%(endName)s).",
-    "unexpected-start-tag-treated-as":
-        "Unexpected start tag (%(originalName)s). Treated as %(newName)s.",
-    "deprecated-tag":
-        "Unexpected start tag %(name)s. Don't use it!",
-    "unexpected-start-tag-ignored":
-        "Unexpected start tag %(name)s. Ignored.",
-    "expected-one-end-tag-but-got-another":
-        "Unexpected end tag (%(gotName)s). "
-        "Missing end tag (%(expectedName)s).",
-    "end-tag-too-early":
-        "End tag (%(name)s) seen too early. Expected other end tag.",
-    "end-tag-too-early-named":
-        "Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s).",
-    "end-tag-too-early-ignored":
-        "End tag (%(name)s) seen too early. Ignored.",
-    "adoption-agency-1.1":
-        "End tag (%(name)s) violates step 1, "
-        "paragraph 1 of the adoption agency algorithm.",
-    "adoption-agency-1.2":
-        "End tag (%(name)s) violates step 1, "
-        "paragraph 2 of the adoption agency algorithm.",
-    "adoption-agency-1.3":
-        "End tag (%(name)s) violates step 1, "
-        "paragraph 3 of the adoption agency algorithm.",
-    "adoption-agency-4.4":
-        "End tag (%(name)s) violates step 4, "
-        "paragraph 4 of the adoption agency algorithm.",
-    "unexpected-end-tag-treated-as":
-        "Unexpected end tag (%(originalName)s). Treated as %(newName)s.",
-    "no-end-tag":
-        "This element (%(name)s) has no end tag.",
-    "unexpected-implied-end-tag-in-table":
-        "Unexpected implied end tag (%(name)s) in the table phase.",
-    "unexpected-implied-end-tag-in-table-body":
-        "Unexpected implied end tag (%(name)s) in the table body phase.",
-    "unexpected-char-implies-table-voodoo":
-        "Unexpected non-space characters in "
-        "table context caused voodoo mode.",
-    "unexpected-hidden-input-in-table":
-        "Unexpected input with type hidden in table context.",
-    "unexpected-form-in-table":
-        "Unexpected form in table context.",
-    "unexpected-start-tag-implies-table-voodoo":
-        "Unexpected start tag (%(name)s) in "
-        "table context caused voodoo mode.",
-    "unexpected-end-tag-implies-table-voodoo":
-        "Unexpected end tag (%(name)s) in "
-        "table context caused voodoo mode.",
-    "unexpected-cell-in-table-body":
-        "Unexpected table cell start tag (%(name)s) "
-        "in the table body phase.",
-    "unexpected-cell-end-tag":
-        "Got table cell end tag (%(name)s) "
-        "while required end tags are missing.",
-    "unexpected-end-tag-in-table-body":
-        "Unexpected end tag (%(name)s) in the table body phase. Ignored.",
-    "unexpected-implied-end-tag-in-table-row":
-        "Unexpected implied end tag (%(name)s) in the table row phase.",
-    "unexpected-end-tag-in-table-row":
-        "Unexpected end tag (%(name)s) in the table row phase. Ignored.",
-    "unexpected-select-in-select":
-        "Unexpected select start tag in the select phase "
-        "treated as select end tag.",
-    "unexpected-input-in-select":
-        "Unexpected input start tag in the select phase.",
-    "unexpected-start-tag-in-select":
-        "Unexpected start tag token (%(name)s in the select phase. "
-        "Ignored.",
-    "unexpected-end-tag-in-select":
-        "Unexpected end tag (%(name)s) in the select phase. Ignored.",
-    "unexpected-table-element-start-tag-in-select-in-table":
-        "Unexpected table element start tag (%(name)s) in the select in table phase.",
-    "unexpected-table-element-end-tag-in-select-in-table":
-        "Unexpected table element end tag (%(name)s) in the select in table phase.",
-    "unexpected-char-after-body":
-        "Unexpected non-space characters in the after body phase.",
-    "unexpected-start-tag-after-body":
-        "Unexpected start tag token (%(name)s)"
-        " in the after body phase.",
-    "unexpected-end-tag-after-body":
-        "Unexpected end tag token (%(name)s)"
-        " in the after body phase.",
-    "unexpected-char-in-frameset":
-        "Unexpected characters in the frameset phase. Characters ignored.",
-    "unexpected-start-tag-in-frameset":
-        "Unexpected start tag token (%(name)s)"
-        " in the frameset phase. Ignored.",
-    "unexpected-frameset-in-frameset-innerhtml":
-        "Unexpected end tag token (frameset) "
-        "in the frameset phase (innerHTML).",
-    "unexpected-end-tag-in-frameset":
-        "Unexpected end tag token (%(name)s)"
-        " in the frameset phase. Ignored.",
-    "unexpected-char-after-frameset":
-        "Unexpected non-space characters in the "
-        "after frameset phase. Ignored.",
-    "unexpected-start-tag-after-frameset":
-        "Unexpected start tag (%(name)s)"
-        " in the after frameset phase. Ignored.",
-    "unexpected-end-tag-after-frameset":
-        "Unexpected end tag (%(name)s)"
-        " in the after frameset phase. Ignored.",
-    "unexpected-end-tag-after-body-innerhtml":
-        "Unexpected end tag after body(innerHtml)",
-    "expected-eof-but-got-char":
-        "Unexpected non-space characters. Expected end of file.",
-    "expected-eof-but-got-start-tag":
-        "Unexpected start tag (%(name)s)"
-        ". Expected end of file.",
-    "expected-eof-but-got-end-tag":
-        "Unexpected end tag (%(name)s)"
-        ". Expected end of file.",
-    "eof-in-table":
-        "Unexpected end of file. Expected table content.",
-    "eof-in-select":
-        "Unexpected end of file. Expected select content.",
-    "eof-in-frameset":
-        "Unexpected end of file. Expected frameset content.",
-    "eof-in-script-in-script":
-        "Unexpected end of file. Expected script content.",
-    "eof-in-foreign-lands":
-        "Unexpected end of file. Expected foreign content",
-    "non-void-element-with-trailing-solidus":
-        "Trailing solidus not allowed on element %(name)s",
-    "unexpected-html-element-in-foreign-content":
-        "Element %(name)s not allowed in a non-html context",
-    "unexpected-end-tag-before-html":
-        "Unexpected end tag (%(name)s) before html.",
-    "unexpected-inhead-noscript-tag":
-        "Element %(name)s not allowed in a inhead-noscript context",
-    "eof-in-head-noscript":
-        "Unexpected end of file. Expected inhead-noscript content",
-    "char-in-head-noscript":
-        "Unexpected non-space character. Expected inhead-noscript content",
-    "XXX-undefined-error":
-        "Undefined error (this sucks and should be fixed)",
-}
-
-namespaces = {
-    "html": "http://www.w3.org/1999/xhtml",
-    "mathml": "http://www.w3.org/1998/Math/MathML",
-    "svg": "http://www.w3.org/2000/svg",
-    "xlink": "http://www.w3.org/1999/xlink",
-    "xml": "http://www.w3.org/XML/1998/namespace",
-    "xmlns": "http://www.w3.org/2000/xmlns/"
-}
-
-scopingElements = frozenset([
-    (namespaces["html"], "applet"),
-    (namespaces["html"], "caption"),
-    (namespaces["html"], "html"),
-    (namespaces["html"], "marquee"),
-    (namespaces["html"], "object"),
-    (namespaces["html"], "table"),
-    (namespaces["html"], "td"),
-    (namespaces["html"], "th"),
-    (namespaces["mathml"], "mi"),
-    (namespaces["mathml"], "mo"),
-    (namespaces["mathml"], "mn"),
-    (namespaces["mathml"], "ms"),
-    (namespaces["mathml"], "mtext"),
-    (namespaces["mathml"], "annotation-xml"),
-    (namespaces["svg"], "foreignObject"),
-    (namespaces["svg"], "desc"),
-    (namespaces["svg"], "title"),
-])
-
-formattingElements = frozenset([
-    (namespaces["html"], "a"),
-    (namespaces["html"], "b"),
-    (namespaces["html"], "big"),
-    (namespaces["html"], "code"),
-    (namespaces["html"], "em"),
-    (namespaces["html"], "font"),
-    (namespaces["html"], "i"),
-    (namespaces["html"], "nobr"),
-    (namespaces["html"], "s"),
-    (namespaces["html"], "small"),
-    (namespaces["html"], "strike"),
-    (namespaces["html"], "strong"),
-    (namespaces["html"], "tt"),
-    (namespaces["html"], "u")
-])
-
-specialElements = frozenset([
-    (namespaces["html"], "address"),
-    (namespaces["html"], "applet"),
-    (namespaces["html"], "area"),
-    (namespaces["html"], "article"),
-    (namespaces["html"], "aside"),
-    (namespaces["html"], "base"),
-    (namespaces["html"], "basefont"),
-    (namespaces["html"], "bgsound"),
-    (namespaces["html"], "blockquote"),
-    (namespaces["html"], "body"),
-    (namespaces["html"], "br"),
-    (namespaces["html"], "button"),
-    (namespaces["html"], "caption"),
-    (namespaces["html"], "center"),
-    (namespaces["html"], "col"),
-    (namespaces["html"], "colgroup"),
-    (namespaces["html"], "command"),
-    (namespaces["html"], "dd"),
-    (namespaces["html"], "details"),
-    (namespaces["html"], "dir"),
-    (namespaces["html"], "div"),
-    (namespaces["html"], "dl"),
-    (namespaces["html"], "dt"),
-    (namespaces["html"], "embed"),
-    (namespaces["html"], "fieldset"),
-    (namespaces["html"], "figure"),
-    (namespaces["html"], "footer"),
-    (namespaces["html"], "form"),
-    (namespaces["html"], "frame"),
-    (namespaces["html"], "frameset"),
-    (namespaces["html"], "h1"),
-    (namespaces["html"], "h2"),
-    (namespaces["html"], "h3"),
-    (namespaces["html"], "h4"),
-    (namespaces["html"], "h5"),
-    (namespaces["html"], "h6"),
-    (namespaces["html"], "head"),
-    (namespaces["html"], "header"),
-    (namespaces["html"], "hr"),
-    (namespaces["html"], "html"),
-    (namespaces["html"], "iframe"),
-    # Note that image is commented out in the spec as "this isn't an
-    # element that can end up on the stack, so it doesn't matter,"
-    (namespaces["html"], "image"),
-    (namespaces["html"], "img"),
-    (namespaces["html"], "input"),
-    (namespaces["html"], "isindex"),
-    (namespaces["html"], "li"),
-    (namespaces["html"], "link"),
-    (namespaces["html"], "listing"),
-    (namespaces["html"], "marquee"),
-    (namespaces["html"], "menu"),
-    (namespaces["html"], "meta"),
-    (namespaces["html"], "nav"),
-    (namespaces["html"], "noembed"),
-    (namespaces["html"], "noframes"),
-    (namespaces["html"], "noscript"),
-    (namespaces["html"], "object"),
-    (namespaces["html"], "ol"),
-    (namespaces["html"], "p"),
-    (namespaces["html"], "param"),
-    (namespaces["html"], "plaintext"),
-    (namespaces["html"], "pre"),
-    (namespaces["html"], "script"),
-    (namespaces["html"], "section"),
-    (namespaces["html"], "select"),
-    (namespaces["html"], "style"),
-    (namespaces["html"], "table"),
-    (namespaces["html"], "tbody"),
-    (namespaces["html"], "td"),
-    (namespaces["html"], "textarea"),
-    (namespaces["html"], "tfoot"),
-    (namespaces["html"], "th"),
-    (namespaces["html"], "thead"),
-    (namespaces["html"], "title"),
-    (namespaces["html"], "tr"),
-    (namespaces["html"], "ul"),
-    (namespaces["html"], "wbr"),
-    (namespaces["html"], "xmp"),
-    (namespaces["svg"], "foreignObject")
-])
-
-htmlIntegrationPointElements = frozenset([
-    (namespaces["mathml"], "annotation-xml"),
-    (namespaces["svg"], "foreignObject"),
-    (namespaces["svg"], "desc"),
-    (namespaces["svg"], "title")
-])
-
-mathmlTextIntegrationPointElements = frozenset([
-    (namespaces["mathml"], "mi"),
-    (namespaces["mathml"], "mo"),
-    (namespaces["mathml"], "mn"),
-    (namespaces["mathml"], "ms"),
-    (namespaces["mathml"], "mtext")
-])
-
-adjustSVGAttributes = {
-    "attributename": "attributeName",
-    "attributetype": "attributeType",
-    "basefrequency": "baseFrequency",
-    "baseprofile": "baseProfile",
-    "calcmode": "calcMode",
-    "clippathunits": "clipPathUnits",
-    "contentscripttype": "contentScriptType",
-    "contentstyletype": "contentStyleType",
-    "diffuseconstant": "diffuseConstant",
-    "edgemode": "edgeMode",
-    "externalresourcesrequired": "externalResourcesRequired",
-    "filterres": "filterRes",
-    "filterunits": "filterUnits",
-    "glyphref": "glyphRef",
-    "gradienttransform": "gradientTransform",
-    "gradientunits": "gradientUnits",
-    "kernelmatrix": "kernelMatrix",
-    "kernelunitlength": "kernelUnitLength",
-    "keypoints": "keyPoints",
-    "keysplines": "keySplines",
-    "keytimes": "keyTimes",
-    "lengthadjust": "lengthAdjust",
-    "limitingconeangle": "limitingConeAngle",
-    "markerheight": "markerHeight",
-    "markerunits": "markerUnits",
-    "markerwidth": "markerWidth",
-    "maskcontentunits": "maskContentUnits",
-    "maskunits": "maskUnits",
-    "numoctaves": "numOctaves",
-    "pathlength": "pathLength",
-    "patterncontentunits": "patternContentUnits",
-    "patterntransform": "patternTransform",
-    "patternunits": "patternUnits",
-    "pointsatx": "pointsAtX",
-    "pointsaty": "pointsAtY",
-    "pointsatz": "pointsAtZ",
-    "preservealpha": "preserveAlpha",
-    "preserveaspectratio": "preserveAspectRatio",
-    "primitiveunits": "primitiveUnits",
-    "refx": "refX",
-    "refy": "refY",
-    "repeatcount": "repeatCount",
-    "repeatdur": "repeatDur",
-    "requiredextensions": "requiredExtensions",
-    "requiredfeatures": "requiredFeatures",
-    "specularconstant": "specularConstant",
-    "specularexponent": "specularExponent",
-    "spreadmethod": "spreadMethod",
-    "startoffset": "startOffset",
-    "stddeviation": "stdDeviation",
-    "stitchtiles": "stitchTiles",
-    "surfacescale": "surfaceScale",
-    "systemlanguage": "systemLanguage",
-    "tablevalues": "tableValues",
-    "targetx": "targetX",
-    "targety": "targetY",
-    "textlength": "textLength",
-    "viewbox": "viewBox",
-    "viewtarget": "viewTarget",
-    "xchannelselector": "xChannelSelector",
-    "ychannelselector": "yChannelSelector",
-    "zoomandpan": "zoomAndPan"
-}
-
-adjustMathMLAttributes = {"definitionurl": "definitionURL"}
-
-adjustForeignAttributes = {
-    "xlink:actuate": ("xlink", "actuate", namespaces["xlink"]),
-    "xlink:arcrole": ("xlink", "arcrole", namespaces["xlink"]),
-    "xlink:href": ("xlink", "href", namespaces["xlink"]),
-    "xlink:role": ("xlink", "role", namespaces["xlink"]),
-    "xlink:show": ("xlink", "show", namespaces["xlink"]),
-    "xlink:title": ("xlink", "title", namespaces["xlink"]),
-    "xlink:type": ("xlink", "type", namespaces["xlink"]),
-    "xml:base": ("xml", "base", namespaces["xml"]),
-    "xml:lang": ("xml", "lang", namespaces["xml"]),
-    "xml:space": ("xml", "space", namespaces["xml"]),
-    "xmlns": (None, "xmlns", namespaces["xmlns"]),
-    "xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"])
-}
-
-unadjustForeignAttributes = dict([((ns, local), qname) for qname, (prefix, local, ns) in
-                                  adjustForeignAttributes.items()])
-
-spaceCharacters = frozenset([
-    "\t",
-    "\n",
-    "\u000C",
-    " ",
-    "\r"
-])
-
-tableInsertModeElements = frozenset([
-    "table",
-    "tbody",
-    "tfoot",
-    "thead",
-    "tr"
-])
-
-asciiLowercase = frozenset(string.ascii_lowercase)
-asciiUppercase = frozenset(string.ascii_uppercase)
-asciiLetters = frozenset(string.ascii_letters)
-digits = frozenset(string.digits)
-hexDigits = frozenset(string.hexdigits)
-
-asciiUpper2Lower = dict([(ord(c), ord(c.lower()))
-                         for c in string.ascii_uppercase])
-
-# Heading elements need to be ordered
-headingElements = (
-    "h1",
-    "h2",
-    "h3",
-    "h4",
-    "h5",
-    "h6"
-)
-
-voidElements = frozenset([
-    "base",
-    "command",
-    "event-source",
-    "link",
-    "meta",
-    "hr",
-    "br",
-    "img",
-    "embed",
-    "param",
-    "area",
-    "col",
-    "input",
-    "source",
-    "track"
-])
-
-cdataElements = frozenset(['title', 'textarea'])
-
-rcdataElements = frozenset([
-    'style',
-    'script',
-    'xmp',
-    'iframe',
-    'noembed',
-    'noframes',
-    'noscript'
-])
-
-booleanAttributes = {
-    "": frozenset(["irrelevant", "itemscope"]),
-    "style": frozenset(["scoped"]),
-    "img": frozenset(["ismap"]),
-    "audio": frozenset(["autoplay", "controls"]),
-    "video": frozenset(["autoplay", "controls"]),
-    "script": frozenset(["defer", "async"]),
-    "details": frozenset(["open"]),
-    "datagrid": frozenset(["multiple", "disabled"]),
-    "command": frozenset(["hidden", "disabled", "checked", "default"]),
-    "hr": frozenset(["noshade"]),
-    "menu": frozenset(["autosubmit"]),
-    "fieldset": frozenset(["disabled", "readonly"]),
-    "option": frozenset(["disabled", "readonly", "selected"]),
-    "optgroup": frozenset(["disabled", "readonly"]),
-    "button": frozenset(["disabled", "autofocus"]),
-    "input": frozenset(["disabled", "readonly", "required", "autofocus", "checked", "ismap"]),
-    "select": frozenset(["disabled", "readonly", "autofocus", "multiple"]),
-    "output": frozenset(["disabled", "readonly"]),
-    "iframe": frozenset(["seamless"]),
-}
-
-# entitiesWindows1252 has to be _ordered_ and needs to have an index. It
-# therefore can't be a frozenset.
-entitiesWindows1252 = (
-    8364,   # 0x80  0x20AC  EURO SIGN
-    65533,  # 0x81          UNDEFINED
-    8218,   # 0x82  0x201A  SINGLE LOW-9 QUOTATION MARK
-    402,    # 0x83  0x0192  LATIN SMALL LETTER F WITH HOOK
-    8222,   # 0x84  0x201E  DOUBLE LOW-9 QUOTATION MARK
-    8230,   # 0x85  0x2026  HORIZONTAL ELLIPSIS
-    8224,   # 0x86  0x2020  DAGGER
-    8225,   # 0x87  0x2021  DOUBLE DAGGER
-    710,    # 0x88  0x02C6  MODIFIER LETTER CIRCUMFLEX ACCENT
-    8240,   # 0x89  0x2030  PER MILLE SIGN
-    352,    # 0x8A  0x0160  LATIN CAPITAL LETTER S WITH CARON
-    8249,   # 0x8B  0x2039  SINGLE LEFT-POINTING ANGLE QUOTATION MARK
-    338,    # 0x8C  0x0152  LATIN CAPITAL LIGATURE OE
-    65533,  # 0x8D          UNDEFINED
-    381,    # 0x8E  0x017D  LATIN CAPITAL LETTER Z WITH CARON
-    65533,  # 0x8F          UNDEFINED
-    65533,  # 0x90          UNDEFINED
-    8216,   # 0x91  0x2018  LEFT SINGLE QUOTATION MARK
-    8217,   # 0x92  0x2019  RIGHT SINGLE QUOTATION MARK
-    8220,   # 0x93  0x201C  LEFT DOUBLE QUOTATION MARK
-    8221,   # 0x94  0x201D  RIGHT DOUBLE QUOTATION MARK
-    8226,   # 0x95  0x2022  BULLET
-    8211,   # 0x96  0x2013  EN DASH
-    8212,   # 0x97  0x2014  EM DASH
-    732,    # 0x98  0x02DC  SMALL TILDE
-    8482,   # 0x99  0x2122  TRADE MARK SIGN
-    353,    # 0x9A  0x0161  LATIN SMALL LETTER S WITH CARON
-    8250,   # 0x9B  0x203A  SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
-    339,    # 0x9C  0x0153  LATIN SMALL LIGATURE OE
-    65533,  # 0x9D          UNDEFINED
-    382,    # 0x9E  0x017E  LATIN SMALL LETTER Z WITH CARON
-    376     # 0x9F  0x0178  LATIN CAPITAL LETTER Y WITH DIAERESIS
-)
-
-xmlEntities = frozenset(['lt;', 'gt;', 'amp;', 'apos;', 'quot;'])
-
-entities = {
-    "AElig": "\xc6",
-    "AElig;": "\xc6",
-    "AMP": "&",
-    "AMP;": "&",
-    "Aacute": "\xc1",
-    "Aacute;": "\xc1",
-    "Abreve;": "\u0102",
-    "Acirc": "\xc2",
-    "Acirc;": "\xc2",
-    "Acy;": "\u0410",
-    "Afr;": "\U0001d504",
-    "Agrave": "\xc0",
-    "Agrave;": "\xc0",
-    "Alpha;": "\u0391",
-    "Amacr;": "\u0100",
-    "And;": "\u2a53",
-    "Aogon;": "\u0104",
-    "Aopf;": "\U0001d538",
-    "ApplyFunction;": "\u2061",
-    "Aring": "\xc5",
-    "Aring;": "\xc5",
-    "Ascr;": "\U0001d49c",
-    "Assign;": "\u2254",
-    "Atilde": "\xc3",
-    "Atilde;": "\xc3",
-    "Auml": "\xc4",
-    "Auml;": "\xc4",
-    "Backslash;": "\u2216",
-    "Barv;": "\u2ae7",
-    "Barwed;": "\u2306",
-    "Bcy;": "\u0411",
-    "Because;": "\u2235",
-    "Bernoullis;": "\u212c",
-    "Beta;": "\u0392",
-    "Bfr;": "\U0001d505",
-    "Bopf;": "\U0001d539",
-    "Breve;": "\u02d8",
-    "Bscr;": "\u212c",
-    "Bumpeq;": "\u224e",
-    "CHcy;": "\u0427",
-    "COPY": "\xa9",
-    "COPY;": "\xa9",
-    "Cacute;": "\u0106",
-    "Cap;": "\u22d2",
-    "CapitalDifferentialD;": "\u2145",
-    "Cayleys;": "\u212d",
-    "Ccaron;": "\u010c",
-    "Ccedil": "\xc7",
-    "Ccedil;": "\xc7",
-    "Ccirc;": "\u0108",
-    "Cconint;": "\u2230",
-    "Cdot;": "\u010a",
-    "Cedilla;": "\xb8",
-    "CenterDot;": "\xb7",
-    "Cfr;": "\u212d",
-    "Chi;": "\u03a7",
-    "CircleDot;": "\u2299",
-    "CircleMinus;": "\u2296",
-    "CirclePlus;": "\u2295",
-    "CircleTimes;": "\u2297",
-    "ClockwiseContourIntegral;": "\u2232",
-    "CloseCurlyDoubleQuote;": "\u201d",
-    "CloseCurlyQuote;": "\u2019",
-    "Colon;": "\u2237",
-    "Colone;": "\u2a74",
-    "Congruent;": "\u2261",
-    "Conint;": "\u222f",
-    "ContourIntegral;": "\u222e",
-    "Copf;": "\u2102",
-    "Coproduct;": "\u2210",
-    "CounterClockwiseContourIntegral;": "\u2233",
-    "Cross;": "\u2a2f",
-    "Cscr;": "\U0001d49e",
-    "Cup;": "\u22d3",
-    "CupCap;": "\u224d",
-    "DD;": "\u2145",
-    "DDotrahd;": "\u2911",
-    "DJcy;": "\u0402",
-    "DScy;": "\u0405",
-    "DZcy;": "\u040f",
-    "Dagger;": "\u2021",
-    "Darr;": "\u21a1",
-    "Dashv;": "\u2ae4",
-    "Dcaron;": "\u010e",
-    "Dcy;": "\u0414",
-    "Del;": "\u2207",
-    "Delta;": "\u0394",
-    "Dfr;": "\U0001d507",
-    "DiacriticalAcute;": "\xb4",
-    "DiacriticalDot;": "\u02d9",
-    "DiacriticalDoubleAcute;": "\u02dd",
-    "DiacriticalGrave;": "`",
-    "DiacriticalTilde;": "\u02dc",
-    "Diamond;": "\u22c4",
-    "DifferentialD;": "\u2146",
-    "Dopf;": "\U0001d53b",
-    "Dot;": "\xa8",
-    "DotDot;": "\u20dc",
-    "DotEqual;": "\u2250",
-    "DoubleContourIntegral;": "\u222f",
-    "DoubleDot;": "\xa8",
-    "DoubleDownArrow;": "\u21d3",
-    "DoubleLeftArrow;": "\u21d0",
-    "DoubleLeftRightArrow;": "\u21d4",
-    "DoubleLeftTee;": "\u2ae4",
-    "DoubleLongLeftArrow;": "\u27f8",
-    "DoubleLongLeftRightArrow;": "\u27fa",
-    "DoubleLongRightArrow;": "\u27f9",
-    "DoubleRightArrow;": "\u21d2",
-    "DoubleRightTee;": "\u22a8",
-    "DoubleUpArrow;": "\u21d1",
-    "DoubleUpDownArrow;": "\u21d5",
-    "DoubleVerticalBar;": "\u2225",
-    "DownArrow;": "\u2193",
-    "DownArrowBar;": "\u2913",
-    "DownArrowUpArrow;": "\u21f5",
-    "DownBreve;": "\u0311",
-    "DownLeftRightVector;": "\u2950",
-    "DownLeftTeeVector;": "\u295e",
-    "DownLeftVector;": "\u21bd",
-    "DownLeftVectorBar;": "\u2956",
-    "DownRightTeeVector;": "\u295f",
-    "DownRightVector;": "\u21c1",
-    "DownRightVectorBar;": "\u2957",
-    "DownTee;": "\u22a4",
-    "DownTeeArrow;": "\u21a7",
-    "Downarrow;": "\u21d3",
-    "Dscr;": "\U0001d49f",
-    "Dstrok;": "\u0110",
-    "ENG;": "\u014a",
-    "ETH": "\xd0",
-    "ETH;": "\xd0",
-    "Eacute": "\xc9",
-    "Eacute;": "\xc9",
-    "Ecaron;": "\u011a",
-    "Ecirc": "\xca",
-    "Ecirc;": "\xca",
-    "Ecy;": "\u042d",
-    "Edot;": "\u0116",
-    "Efr;": "\U0001d508",
-    "Egrave": "\xc8",
-    "Egrave;": "\xc8",
-    "Element;": "\u2208",
-    "Emacr;": "\u0112",
-    "EmptySmallSquare;": "\u25fb",
-    "EmptyVerySmallSquare;": "\u25ab",
-    "Eogon;": "\u0118",
-    "Eopf;": "\U0001d53c",
-    "Epsilon;": "\u0395",
-    "Equal;": "\u2a75",
-    "EqualTilde;": "\u2242",
-    "Equilibrium;": "\u21cc",
-    "Escr;": "\u2130",
-    "Esim;": "\u2a73",
-    "Eta;": "\u0397",
-    "Euml": "\xcb",
-    "Euml;": "\xcb",
-    "Exists;": "\u2203",
-    "ExponentialE;": "\u2147",
-    "Fcy;": "\u0424",
-    "Ffr;": "\U0001d509",
-    "FilledSmallSquare;": "\u25fc",
-    "FilledVerySmallSquare;": "\u25aa",
-    "Fopf;": "\U0001d53d",
-    "ForAll;": "\u2200",
-    "Fouriertrf;": "\u2131",
-    "Fscr;": "\u2131",
-    "GJcy;": "\u0403",
-    "GT": ">",
-    "GT;": ">",
-    "Gamma;": "\u0393",
-    "Gammad;": "\u03dc",
-    "Gbreve;": "\u011e",
-    "Gcedil;": "\u0122",
-    "Gcirc;": "\u011c",
-    "Gcy;": "\u0413",
-    "Gdot;": "\u0120",
-    "Gfr;": "\U0001d50a",
-    "Gg;": "\u22d9",
-    "Gopf;": "\U0001d53e",
-    "GreaterEqual;": "\u2265",
-    "GreaterEqualLess;": "\u22db",
-    "GreaterFullEqual;": "\u2267",
-    "GreaterGreater;": "\u2aa2",
-    "GreaterLess;": "\u2277",
-    "GreaterSlantEqual;": "\u2a7e",
-    "GreaterTilde;": "\u2273",
-    "Gscr;": "\U0001d4a2",
-    "Gt;": "\u226b",
-    "HARDcy;": "\u042a",
-    "Hacek;": "\u02c7",
-    "Hat;": "^",
-    "Hcirc;": "\u0124",
-    "Hfr;": "\u210c",
-    "HilbertSpace;": "\u210b",
-    "Hopf;": "\u210d",
-    "HorizontalLine;": "\u2500",
-    "Hscr;": "\u210b",
-    "Hstrok;": "\u0126",
-    "HumpDownHump;": "\u224e",
-    "HumpEqual;": "\u224f",
-    "IEcy;": "\u0415",
-    "IJlig;": "\u0132",
-    "IOcy;": "\u0401",
-    "Iacute": "\xcd",
-    "Iacute;": "\xcd",
-    "Icirc": "\xce",
-    "Icirc;": "\xce",
-    "Icy;": "\u0418",
-    "Idot;": "\u0130",
-    "Ifr;": "\u2111",
-    "Igrave": "\xcc",
-    "Igrave;": "\xcc",
-    "Im;": "\u2111",
-    "Imacr;": "\u012a",
-    "ImaginaryI;": "\u2148",
-    "Implies;": "\u21d2",
-    "Int;": "\u222c",
-    "Integral;": "\u222b",
-    "Intersection;": "\u22c2",
-    "InvisibleComma;": "\u2063",
-    "InvisibleTimes;": "\u2062",
-    "Iogon;": "\u012e",
-    "Iopf;": "\U0001d540",
-    "Iota;": "\u0399",
-    "Iscr;": "\u2110",
-    "Itilde;": "\u0128",
-    "Iukcy;": "\u0406",
-    "Iuml": "\xcf",
-    "Iuml;": "\xcf",
-    "Jcirc;": "\u0134",
-    "Jcy;": "\u0419",
-    "Jfr;": "\U0001d50d",
-    "Jopf;": "\U0001d541",
-    "Jscr;": "\U0001d4a5",
-    "Jsercy;": "\u0408",
-    "Jukcy;": "\u0404",
-    "KHcy;": "\u0425",
-    "KJcy;": "\u040c",
-    "Kappa;": "\u039a",
-    "Kcedil;": "\u0136",
-    "Kcy;": "\u041a",
-    "Kfr;": "\U0001d50e",
-    "Kopf;": "\U0001d542",
-    "Kscr;": "\U0001d4a6",
-    "LJcy;": "\u0409",
-    "LT": "<",
-    "LT;": "<",
-    "Lacute;": "\u0139",
-    "Lambda;": "\u039b",
-    "Lang;": "\u27ea",
-    "Laplacetrf;": "\u2112",
-    "Larr;": "\u219e",
-    "Lcaron;": "\u013d",
-    "Lcedil;": "\u013b",
-    "Lcy;": "\u041b",
-    "LeftAngleBracket;": "\u27e8",
-    "LeftArrow;": "\u2190",
-    "LeftArrowBar;": "\u21e4",
-    "LeftArrowRightArrow;": "\u21c6",
-    "LeftCeiling;": "\u2308",
-    "LeftDoubleBracket;": "\u27e6",
-    "LeftDownTeeVector;": "\u2961",
-    "LeftDownVector;": "\u21c3",
-    "LeftDownVectorBar;": "\u2959",
-    "LeftFloor;": "\u230a",
-    "LeftRightArrow;": "\u2194",
-    "LeftRightVector;": "\u294e",
-    "LeftTee;": "\u22a3",
-    "LeftTeeArrow;": "\u21a4",
-    "LeftTeeVector;": "\u295a",
-    "LeftTriangle;": "\u22b2",
-    "LeftTriangleBar;": "\u29cf",
-    "LeftTriangleEqual;": "\u22b4",
-    "LeftUpDownVector;": "\u2951",
-    "LeftUpTeeVector;": "\u2960",
-    "LeftUpVector;": "\u21bf",
-    "LeftUpVectorBar;": "\u2958",
-    "LeftVector;": "\u21bc",
-    "LeftVectorBar;": "\u2952",
-    "Leftarrow;": "\u21d0",
-    "Leftrightarrow;": "\u21d4",
-    "LessEqualGreater;": "\u22da",
-    "LessFullEqual;": "\u2266",
-    "LessGreater;": "\u2276",
-    "LessLess;": "\u2aa1",
-    "LessSlantEqual;": "\u2a7d",
-    "LessTilde;": "\u2272",
-    "Lfr;": "\U0001d50f",
-    "Ll;": "\u22d8",
-    "Lleftarrow;": "\u21da",
-    "Lmidot;": "\u013f",
-    "LongLeftArrow;": "\u27f5",
-    "LongLeftRightArrow;": "\u27f7",
-    "LongRightArrow;": "\u27f6",
-    "Longleftarrow;": "\u27f8",
-    "Longleftrightarrow;": "\u27fa",
-    "Longrightarrow;": "\u27f9",
-    "Lopf;": "\U0001d543",
-    "LowerLeftArrow;": "\u2199",
-    "LowerRightArrow;": "\u2198",
-    "Lscr;": "\u2112",
-    "Lsh;": "\u21b0",
-    "Lstrok;": "\u0141",
-    "Lt;": "\u226a",
-    "Map;": "\u2905",
-    "Mcy;": "\u041c",
-    "MediumSpace;": "\u205f",
-    "Mellintrf;": "\u2133",
-    "Mfr;": "\U0001d510",
-    "MinusPlus;": "\u2213",
-    "Mopf;": "\U0001d544",
-    "Mscr;": "\u2133",
-    "Mu;": "\u039c",
-    "NJcy;": "\u040a",
-    "Nacute;": "\u0143",
-    "Ncaron;": "\u0147",
-    "Ncedil;": "\u0145",
-    "Ncy;": "\u041d",
-    "NegativeMediumSpace;": "\u200b",
-    "NegativeThickSpace;": "\u200b",
-    "NegativeThinSpace;": "\u200b",
-    "NegativeVeryThinSpace;": "\u200b",
-    "NestedGreaterGreater;": "\u226b",
-    "NestedLessLess;": "\u226a",
-    "NewLine;": "\n",
-    "Nfr;": "\U0001d511",
-    "NoBreak;": "\u2060",
-    "NonBreakingSpace;": "\xa0",
-    "Nopf;": "\u2115",
-    "Not;": "\u2aec",
-    "NotCongruent;": "\u2262",
-    "NotCupCap;": "\u226d",
-    "NotDoubleVerticalBar;": "\u2226",
-    "NotElement;": "\u2209",
-    "NotEqual;": "\u2260",
-    "NotEqualTilde;": "\u2242\u0338",
-    "NotExists;": "\u2204",
-    "NotGreater;": "\u226f",
-    "NotGreaterEqual;": "\u2271",
-    "NotGreaterFullEqual;": "\u2267\u0338",
-    "NotGreaterGreater;": "\u226b\u0338",
-    "NotGreaterLess;": "\u2279",
-    "NotGreaterSlantEqual;": "\u2a7e\u0338",
-    "NotGreaterTilde;": "\u2275",
-    "NotHumpDownHump;": "\u224e\u0338",
-    "NotHumpEqual;": "\u224f\u0338",
-    "NotLeftTriangle;": "\u22ea",
-    "NotLeftTriangleBar;": "\u29cf\u0338",
-    "NotLeftTriangleEqual;": "\u22ec",
-    "NotLess;": "\u226e",
-    "NotLessEqual;": "\u2270",
-    "NotLessGreater;": "\u2278",
-    "NotLessLess;": "\u226a\u0338",
-    "NotLessSlantEqual;": "\u2a7d\u0338",
-    "NotLessTilde;": "\u2274",
-    "NotNestedGreaterGreater;": "\u2aa2\u0338",
-    "NotNestedLessLess;": "\u2aa1\u0338",
-    "NotPrecedes;": "\u2280",
-    "NotPrecedesEqual;": "\u2aaf\u0338",
-    "NotPrecedesSlantEqual;": "\u22e0",
-    "NotReverseElement;": "\u220c",
-    "NotRightTriangle;": "\u22eb",
-    "NotRightTriangleBar;": "\u29d0\u0338",
-    "NotRightTriangleEqual;": "\u22ed",
-    "NotSquareSubset;": "\u228f\u0338",
-    "NotSquareSubsetEqual;": "\u22e2",
-    "NotSquareSuperset;": "\u2290\u0338",
-    "NotSquareSupersetEqual;": "\u22e3",
-    "NotSubset;": "\u2282\u20d2",
-    "NotSubsetEqual;": "\u2288",
-    "NotSucceeds;": "\u2281",
-    "NotSucceedsEqual;": "\u2ab0\u0338",
-    "NotSucceedsSlantEqual;": "\u22e1",
-    "NotSucceedsTilde;": "\u227f\u0338",
-    "NotSuperset;": "\u2283\u20d2",
-    "NotSupersetEqual;": "\u2289",
-    "NotTilde;": "\u2241",
-    "NotTildeEqual;": "\u2244",
-    "NotTildeFullEqual;": "\u2247",
-    "NotTildeTilde;": "\u2249",
-    "NotVerticalBar;": "\u2224",
-    "Nscr;": "\U0001d4a9",
-    "Ntilde": "\xd1",
-    "Ntilde;": "\xd1",
-    "Nu;": "\u039d",
-    "OElig;": "\u0152",
-    "Oacute": "\xd3",
-    "Oacute;": "\xd3",
-    "Ocirc": "\xd4",
-    "Ocirc;": "\xd4",
-    "Ocy;": "\u041e",
-    "Odblac;": "\u0150",
-    "Ofr;": "\U0001d512",
-    "Ograve": "\xd2",
-    "Ograve;": "\xd2",
-    "Omacr;": "\u014c",
-    "Omega;": "\u03a9",
-    "Omicron;": "\u039f",
-    "Oopf;": "\U0001d546",
-    "OpenCurlyDoubleQuote;": "\u201c",
-    "OpenCurlyQuote;": "\u2018",
-    "Or;": "\u2a54",
-    "Oscr;": "\U0001d4aa",
-    "Oslash": "\xd8",
-    "Oslash;": "\xd8",
-    "Otilde": "\xd5",
-    "Otilde;": "\xd5",
-    "Otimes;": "\u2a37",
-    "Ouml": "\xd6",
-    "Ouml;": "\xd6",
-    "OverBar;": "\u203e",
-    "OverBrace;": "\u23de",
-    "OverBracket;": "\u23b4",
-    "OverParenthesis;": "\u23dc",
-    "PartialD;": "\u2202",
-    "Pcy;": "\u041f",
-    "Pfr;": "\U0001d513",
-    "Phi;": "\u03a6",
-    "Pi;": "\u03a0",
-    "PlusMinus;": "\xb1",
-    "Poincareplane;": "\u210c",
-    "Popf;": "\u2119",
-    "Pr;": "\u2abb",
-    "Precedes;": "\u227a",
-    "PrecedesEqual;": "\u2aaf",
-    "PrecedesSlantEqual;": "\u227c",
-    "PrecedesTilde;": "\u227e",
-    "Prime;": "\u2033",
-    "Product;": "\u220f",
-    "Proportion;": "\u2237",
-    "Proportional;": "\u221d",
-    "Pscr;": "\U0001d4ab",
-    "Psi;": "\u03a8",
-    "QUOT": "\"",
-    "QUOT;": "\"",
-    "Qfr;": "\U0001d514",
-    "Qopf;": "\u211a",
-    "Qscr;": "\U0001d4ac",
-    "RBarr;": "\u2910",
-    "REG": "\xae",
-    "REG;": "\xae",
-    "Racute;": "\u0154",
-    "Rang;": "\u27eb",
-    "Rarr;": "\u21a0",
-    "Rarrtl;": "\u2916",
-    "Rcaron;": "\u0158",
-    "Rcedil;": "\u0156",
-    "Rcy;": "\u0420",
-    "Re;": "\u211c",
-    "ReverseElement;": "\u220b",
-    "ReverseEquilibrium;": "\u21cb",
-    "ReverseUpEquilibrium;": "\u296f",
-    "Rfr;": "\u211c",
-    "Rho;": "\u03a1",
-    "RightAngleBracket;": "\u27e9",
-    "RightArrow;": "\u2192",
-    "RightArrowBar;": "\u21e5",
-    "RightArrowLeftArrow;": "\u21c4",
-    "RightCeiling;": "\u2309",
-    "RightDoubleBracket;": "\u27e7",
-    "RightDownTeeVector;": "\u295d",
-    "RightDownVector;": "\u21c2",
-    "RightDownVectorBar;": "\u2955",
-    "RightFloor;": "\u230b",
-    "RightTee;": "\u22a2",
-    "RightTeeArrow;": "\u21a6",
-    "RightTeeVector;": "\u295b",
-    "RightTriangle;": "\u22b3",
-    "RightTriangleBar;": "\u29d0",
-    "RightTriangleEqual;": "\u22b5",
-    "RightUpDownVector;": "\u294f",
-    "RightUpTeeVector;": "\u295c",
-    "RightUpVector;": "\u21be",
-    "RightUpVectorBar;": "\u2954",
-    "RightVector;": "\u21c0",
-    "RightVectorBar;": "\u2953",
-    "Rightarrow;": "\u21d2",
-    "Ropf;": "\u211d",
-    "RoundImplies;": "\u2970",
-    "Rrightarrow;": "\u21db",
-    "Rscr;": "\u211b",
-    "Rsh;": "\u21b1",
-    "RuleDelayed;": "\u29f4",
-    "SHCHcy;": "\u0429",
-    "SHcy;": "\u0428",
-    "SOFTcy;": "\u042c",
-    "Sacute;": "\u015a",
-    "Sc;": "\u2abc",
-    "Scaron;": "\u0160",
-    "Scedil;": "\u015e",
-    "Scirc;": "\u015c",
-    "Scy;": "\u0421",
-    "Sfr;": "\U0001d516",
-    "ShortDownArrow;": "\u2193",
-    "ShortLeftArrow;": "\u2190",
-    "ShortRightArrow;": "\u2192",
-    "ShortUpArrow;": "\u2191",
-    "Sigma;": "\u03a3",
-    "SmallCircle;": "\u2218",
-    "Sopf;": "\U0001d54a",
-    "Sqrt;": "\u221a",
-    "Square;": "\u25a1",
-    "SquareIntersection;": "\u2293",
-    "SquareSubset;": "\u228f",
-    "SquareSubsetEqual;": "\u2291",
-    "SquareSuperset;": "\u2290",
-    "SquareSupersetEqual;": "\u2292",
-    "SquareUnion;": "\u2294",
-    "Sscr;": "\U0001d4ae",
-    "Star;": "\u22c6",
-    "Sub;": "\u22d0",
-    "Subset;": "\u22d0",
-    "SubsetEqual;": "\u2286",
-    "Succeeds;": "\u227b",
-    "SucceedsEqual;": "\u2ab0",
-    "SucceedsSlantEqual;": "\u227d",
-    "SucceedsTilde;": "\u227f",
-    "SuchThat;": "\u220b",
-    "Sum;": "\u2211",
-    "Sup;": "\u22d1",
-    "Superset;": "\u2283",
-    "SupersetEqual;": "\u2287",
-    "Supset;": "\u22d1",
-    "THORN": "\xde",
-    "THORN;": "\xde",
-    "TRADE;": "\u2122",
-    "TSHcy;": "\u040b",
-    "TScy;": "\u0426",
-    "Tab;": "\t",
-    "Tau;": "\u03a4",
-    "Tcaron;": "\u0164",
-    "Tcedil;": "\u0162",
-    "Tcy;": "\u0422",
-    "Tfr;": "\U0001d517",
-    "Therefore;": "\u2234",
-    "Theta;": "\u0398",
-    "ThickSpace;": "\u205f\u200a",
-    "ThinSpace;": "\u2009",
-    "Tilde;": "\u223c",
-    "TildeEqual;": "\u2243",
-    "TildeFullEqual;": "\u2245",
-    "TildeTilde;": "\u2248",
-    "Topf;": "\U0001d54b",
-    "TripleDot;": "\u20db",
-    "Tscr;": "\U0001d4af",
-    "Tstrok;": "\u0166",
-    "Uacute": "\xda",
-    "Uacute;": "\xda",
-    "Uarr;": "\u219f",
-    "Uarrocir;": "\u2949",
-    "Ubrcy;": "\u040e",
-    "Ubreve;": "\u016c",
-    "Ucirc": "\xdb",
-    "Ucirc;": "\xdb",
-    "Ucy;": "\u0423",
-    "Udblac;": "\u0170",
-    "Ufr;": "\U0001d518",
-    "Ugrave": "\xd9",
-    "Ugrave;": "\xd9",
-    "Umacr;": "\u016a",
-    "UnderBar;": "_",
-    "UnderBrace;": "\u23df",
-    "UnderBracket;": "\u23b5",
-    "UnderParenthesis;": "\u23dd",
-    "Union;": "\u22c3",
-    "UnionPlus;": "\u228e",
-    "Uogon;": "\u0172",
-    "Uopf;": "\U0001d54c",
-    "UpArrow;": "\u2191",
-    "UpArrowBar;": "\u2912",
-    "UpArrowDownArrow;": "\u21c5",
-    "UpDownArrow;": "\u2195",
-    "UpEquilibrium;": "\u296e",
-    "UpTee;": "\u22a5",
-    "UpTeeArrow;": "\u21a5",
-    "Uparrow;": "\u21d1",
-    "Updownarrow;": "\u21d5",
-    "UpperLeftArrow;": "\u2196",
-    "UpperRightArrow;": "\u2197",
-    "Upsi;": "\u03d2",
-    "Upsilon;": "\u03a5",
-    "Uring;": "\u016e",
-    "Uscr;": "\U0001d4b0",
-    "Utilde;": "\u0168",
-    "Uuml": "\xdc",
-    "Uuml;": "\xdc",
-    "VDash;": "\u22ab",
-    "Vbar;": "\u2aeb",
-    "Vcy;": "\u0412",
-    "Vdash;": "\u22a9",
-    "Vdashl;": "\u2ae6",
-    "Vee;": "\u22c1",
-    "Verbar;": "\u2016",
-    "Vert;": "\u2016",
-    "VerticalBar;": "\u2223",
-    "VerticalLine;": "|",
-    "VerticalSeparator;": "\u2758",
-    "VerticalTilde;": "\u2240",
-    "VeryThinSpace;": "\u200a",
-    "Vfr;": "\U0001d519",
-    "Vopf;": "\U0001d54d",
-    "Vscr;": "\U0001d4b1",
-    "Vvdash;": "\u22aa",
-    "Wcirc;": "\u0174",
-    "Wedge;": "\u22c0",
-    "Wfr;": "\U0001d51a",
-    "Wopf;": "\U0001d54e",
-    "Wscr;": "\U0001d4b2",
-    "Xfr;": "\U0001d51b",
-    "Xi;": "\u039e",
-    "Xopf;": "\U0001d54f",
-    "Xscr;": "\U0001d4b3",
-    "YAcy;": "\u042f",
-    "YIcy;": "\u0407",
-    "YUcy;": "\u042e",
-    "Yacute": "\xdd",
-    "Yacute;": "\xdd",
-    "Ycirc;": "\u0176",
-    "Ycy;": "\u042b",
-    "Yfr;": "\U0001d51c",
-    "Yopf;": "\U0001d550",
-    "Yscr;": "\U0001d4b4",
-    "Yuml;": "\u0178",
-    "ZHcy;": "\u0416",
-    "Zacute;": "\u0179",
-    "Zcaron;": "\u017d",
-    "Zcy;": "\u0417",
-    "Zdot;": "\u017b",
-    "ZeroWidthSpace;": "\u200b",
-    "Zeta;": "\u0396",
-    "Zfr;": "\u2128",
-    "Zopf;": "\u2124",
-    "Zscr;": "\U0001d4b5",
-    "aacute": "\xe1",
-    "aacute;": "\xe1",
-    "abreve;": "\u0103",
-    "ac;": "\u223e",
-    "acE;": "\u223e\u0333",
-    "acd;": "\u223f",
-    "acirc": "\xe2",
-    "acirc;": "\xe2",
-    "acute": "\xb4",
-    "acute;": "\xb4",
-    "acy;": "\u0430",
-    "aelig": "\xe6",
-    "aelig;": "\xe6",
-    "af;": "\u2061",
-    "afr;": "\U0001d51e",
-    "agrave": "\xe0",
-    "agrave;": "\xe0",
-    "alefsym;": "\u2135",
-    "aleph;": "\u2135",
-    "alpha;": "\u03b1",
-    "amacr;": "\u0101",
-    "amalg;": "\u2a3f",
-    "amp": "&",
-    "amp;": "&",
-    "and;": "\u2227",
-    "andand;": "\u2a55",
-    "andd;": "\u2a5c",
-    "andslope;": "\u2a58",
-    "andv;": "\u2a5a",
-    "ang;": "\u2220",
-    "ange;": "\u29a4",
-    "angle;": "\u2220",
-    "angmsd;": "\u2221",
-    "angmsdaa;": "\u29a8",
-    "angmsdab;": "\u29a9",
-    "angmsdac;": "\u29aa",
-    "angmsdad;": "\u29ab",
-    "angmsdae;": "\u29ac",
-    "angmsdaf;": "\u29ad",
-    "angmsdag;": "\u29ae",
-    "angmsdah;": "\u29af",
-    "angrt;": "\u221f",
-    "angrtvb;": "\u22be",
-    "angrtvbd;": "\u299d",
-    "angsph;": "\u2222",
-    "angst;": "\xc5",
-    "angzarr;": "\u237c",
-    "aogon;": "\u0105",
-    "aopf;": "\U0001d552",
-    "ap;": "\u2248",
-    "apE;": "\u2a70",
-    "apacir;": "\u2a6f",
-    "ape;": "\u224a",
-    "apid;": "\u224b",
-    "apos;": "'",
-    "approx;": "\u2248",
-    "approxeq;": "\u224a",
-    "aring": "\xe5",
-    "aring;": "\xe5",
-    "ascr;": "\U0001d4b6",
-    "ast;": "*",
-    "asymp;": "\u2248",
-    "asympeq;": "\u224d",
-    "atilde": "\xe3",
-    "atilde;": "\xe3",
-    "auml": "\xe4",
-    "auml;": "\xe4",
-    "awconint;": "\u2233",
-    "awint;": "\u2a11",
-    "bNot;": "\u2aed",
-    "backcong;": "\u224c",
-    "backepsilon;": "\u03f6",
-    "backprime;": "\u2035",
-    "backsim;": "\u223d",
-    "backsimeq;": "\u22cd",
-    "barvee;": "\u22bd",
-    "barwed;": "\u2305",
-    "barwedge;": "\u2305",
-    "bbrk;": "\u23b5",
-    "bbrktbrk;": "\u23b6",
-    "bcong;": "\u224c",
-    "bcy;": "\u0431",
-    "bdquo;": "\u201e",
-    "becaus;": "\u2235",
-    "because;": "\u2235",
-    "bemptyv;": "\u29b0",
-    "bepsi;": "\u03f6",
-    "bernou;": "\u212c",
-    "beta;": "\u03b2",
-    "beth;": "\u2136",
-    "between;": "\u226c",
-    "bfr;": "\U0001d51f",
-    "bigcap;": "\u22c2",
-    "bigcirc;": "\u25ef",
-    "bigcup;": "\u22c3",
-    "bigodot;": "\u2a00",
-    "bigoplus;": "\u2a01",
-    "bigotimes;": "\u2a02",
-    "bigsqcup;": "\u2a06",
-    "bigstar;": "\u2605",
-    "bigtriangledown;": "\u25bd",
-    "bigtriangleup;": "\u25b3",
-    "biguplus;": "\u2a04",
-    "bigvee;": "\u22c1",
-    "bigwedge;": "\u22c0",
-    "bkarow;": "\u290d",
-    "blacklozenge;": "\u29eb",
-    "blacksquare;": "\u25aa",
-    "blacktriangle;": "\u25b4",
-    "blacktriangledown;": "\u25be",
-    "blacktriangleleft;": "\u25c2",
-    "blacktriangleright;": "\u25b8",
-    "blank;": "\u2423",
-    "blk12;": "\u2592",
-    "blk14;": "\u2591",
-    "blk34;": "\u2593",
-    "block;": "\u2588",
-    "bne;": "=\u20e5",
-    "bnequiv;": "\u2261\u20e5",
-    "bnot;": "\u2310",
-    "bopf;": "\U0001d553",
-    "bot;": "\u22a5",
-    "bottom;": "\u22a5",
-    "bowtie;": "\u22c8",
-    "boxDL;": "\u2557",
-    "boxDR;": "\u2554",
-    "boxDl;": "\u2556",
-    "boxDr;": "\u2553",
-    "boxH;": "\u2550",
-    "boxHD;": "\u2566",
-    "boxHU;": "\u2569",
-    "boxHd;": "\u2564",
-    "boxHu;": "\u2567",
-    "boxUL;": "\u255d",
-    "boxUR;": "\u255a",
-    "boxUl;": "\u255c",
-    "boxUr;": "\u2559",
-    "boxV;": "\u2551",
-    "boxVH;": "\u256c",
-    "boxVL;": "\u2563",
-    "boxVR;": "\u2560",
-    "boxVh;": "\u256b",
-    "boxVl;": "\u2562",
-    "boxVr;": "\u255f",
-    "boxbox;": "\u29c9",
-    "boxdL;": "\u2555",
-    "boxdR;": "\u2552",
-    "boxdl;": "\u2510",
-    "boxdr;": "\u250c",
-    "boxh;": "\u2500",
-    "boxhD;": "\u2565",
-    "boxhU;": "\u2568",
-    "boxhd;": "\u252c",
-    "boxhu;": "\u2534",
-    "boxminus;": "\u229f",
-    "boxplus;": "\u229e",
-    "boxtimes;": "\u22a0",
-    "boxuL;": "\u255b",
-    "boxuR;": "\u2558",
-    "boxul;": "\u2518",
-    "boxur;": "\u2514",
-    "boxv;": "\u2502",
-    "boxvH;": "\u256a",
-    "boxvL;": "\u2561",
-    "boxvR;": "\u255e",
-    "boxvh;": "\u253c",
-    "boxvl;": "\u2524",
-    "boxvr;": "\u251c",
-    "bprime;": "\u2035",
-    "breve;": "\u02d8",
-    "brvbar": "\xa6",
-    "brvbar;": "\xa6",
-    "bscr;": "\U0001d4b7",
-    "bsemi;": "\u204f",
-    "bsim;": "\u223d",
-    "bsime;": "\u22cd",
-    "bsol;": "\\",
-    "bsolb;": "\u29c5",
-    "bsolhsub;": "\u27c8",
-    "bull;": "\u2022",
-    "bullet;": "\u2022",
-    "bump;": "\u224e",
-    "bumpE;": "\u2aae",
-    "bumpe;": "\u224f",
-    "bumpeq;": "\u224f",
-    "cacute;": "\u0107",
-    "cap;": "\u2229",
-    "capand;": "\u2a44",
-    "capbrcup;": "\u2a49",
-    "capcap;": "\u2a4b",
-    "capcup;": "\u2a47",
-    "capdot;": "\u2a40",
-    "caps;": "\u2229\ufe00",
-    "caret;": "\u2041",
-    "caron;": "\u02c7",
-    "ccaps;": "\u2a4d",
-    "ccaron;": "\u010d",
-    "ccedil": "\xe7",
-    "ccedil;": "\xe7",
-    "ccirc;": "\u0109",
-    "ccups;": "\u2a4c",
-    "ccupssm;": "\u2a50",
-    "cdot;": "\u010b",
-    "cedil": "\xb8",
-    "cedil;": "\xb8",
-    "cemptyv;": "\u29b2",
-    "cent": "\xa2",
-    "cent;": "\xa2",
-    "centerdot;": "\xb7",
-    "cfr;": "\U0001d520",
-    "chcy;": "\u0447",
-    "check;": "\u2713",
-    "checkmark;": "\u2713",
-    "chi;": "\u03c7",
-    "cir;": "\u25cb",
-    "cirE;": "\u29c3",
-    "circ;": "\u02c6",
-    "circeq;": "\u2257",
-    "circlearrowleft;": "\u21ba",
-    "circlearrowright;": "\u21bb",
-    "circledR;": "\xae",
-    "circledS;": "\u24c8",
-    "circledast;": "\u229b",
-    "circledcirc;": "\u229a",
-    "circleddash;": "\u229d",
-    "cire;": "\u2257",
-    "cirfnint;": "\u2a10",
-    "cirmid;": "\u2aef",
-    "cirscir;": "\u29c2",
-    "clubs;": "\u2663",
-    "clubsuit;": "\u2663",
-    "colon;": ":",
-    "colone;": "\u2254",
-    "coloneq;": "\u2254",
-    "comma;": ",",
-    "commat;": "@",
-    "comp;": "\u2201",
-    "compfn;": "\u2218",
-    "complement;": "\u2201",
-    "complexes;": "\u2102",
-    "cong;": "\u2245",
-    "congdot;": "\u2a6d",
-    "conint;": "\u222e",
-    "copf;": "\U0001d554",
-    "coprod;": "\u2210",
-    "copy": "\xa9",
-    "copy;": "\xa9",
-    "copysr;": "\u2117",
-    "crarr;": "\u21b5",
-    "cross;": "\u2717",
-    "cscr;": "\U0001d4b8",
-    "csub;": "\u2acf",
-    "csube;": "\u2ad1",
-    "csup;": "\u2ad0",
-    "csupe;": "\u2ad2",
-    "ctdot;": "\u22ef",
-    "cudarrl;": "\u2938",
-    "cudarrr;": "\u2935",
-    "cuepr;": "\u22de",
-    "cuesc;": "\u22df",
-    "cularr;": "\u21b6",
-    "cularrp;": "\u293d",
-    "cup;": "\u222a",
-    "cupbrcap;": "\u2a48",
-    "cupcap;": "\u2a46",
-    "cupcup;": "\u2a4a",
-    "cupdot;": "\u228d",
-    "cupor;": "\u2a45",
-    "cups;": "\u222a\ufe00",
-    "curarr;": "\u21b7",
-    "curarrm;": "\u293c",
-    "curlyeqprec;": "\u22de",
-    "curlyeqsucc;": "\u22df",
-    "curlyvee;": "\u22ce",
-    "curlywedge;": "\u22cf",
-    "curren": "\xa4",
-    "curren;": "\xa4",
-    "curvearrowleft;": "\u21b6",
-    "curvearrowright;": "\u21b7",
-    "cuvee;": "\u22ce",
-    "cuwed;": "\u22cf",
-    "cwconint;": "\u2232",
-    "cwint;": "\u2231",
-    "cylcty;": "\u232d",
-    "dArr;": "\u21d3",
-    "dHar;": "\u2965",
-    "dagger;": "\u2020",
-    "daleth;": "\u2138",
-    "darr;": "\u2193",
-    "dash;": "\u2010",
-    "dashv;": "\u22a3",
-    "dbkarow;": "\u290f",
-    "dblac;": "\u02dd",
-    "dcaron;": "\u010f",
-    "dcy;": "\u0434",
-    "dd;": "\u2146",
-    "ddagger;": "\u2021",
-    "ddarr;": "\u21ca",
-    "ddotseq;": "\u2a77",
-    "deg": "\xb0",
-    "deg;": "\xb0",
-    "delta;": "\u03b4",
-    "demptyv;": "\u29b1",
-    "dfisht;": "\u297f",
-    "dfr;": "\U0001d521",
-    "dharl;": "\u21c3",
-    "dharr;": "\u21c2",
-    "diam;": "\u22c4",
-    "diamond;": "\u22c4",
-    "diamondsuit;": "\u2666",
-    "diams;": "\u2666",
-    "die;": "\xa8",
-    "digamma;": "\u03dd",
-    "disin;": "\u22f2",
-    "div;": "\xf7",
-    "divide": "\xf7",
-    "divide;": "\xf7",
-    "divideontimes;": "\u22c7",
-    "divonx;": "\u22c7",
-    "djcy;": "\u0452",
-    "dlcorn;": "\u231e",
-    "dlcrop;": "\u230d",
-    "dollar;": "$",
-    "dopf;": "\U0001d555",
-    "dot;": "\u02d9",
-    "doteq;": "\u2250",
-    "doteqdot;": "\u2251",
-    "dotminus;": "\u2238",
-    "dotplus;": "\u2214",
-    "dotsquare;": "\u22a1",
-    "doublebarwedge;": "\u2306",
-    "downarrow;": "\u2193",
-    "downdownarrows;": "\u21ca",
-    "downharpoonleft;": "\u21c3",
-    "downharpoonright;": "\u21c2",
-    "drbkarow;": "\u2910",
-    "drcorn;": "\u231f",
-    "drcrop;": "\u230c",
-    "dscr;": "\U0001d4b9",
-    "dscy;": "\u0455",
-    "dsol;": "\u29f6",
-    "dstrok;": "\u0111",
-    "dtdot;": "\u22f1",
-    "dtri;": "\u25bf",
-    "dtrif;": "\u25be",
-    "duarr;": "\u21f5",
-    "duhar;": "\u296f",
-    "dwangle;": "\u29a6",
-    "dzcy;": "\u045f",
-    "dzigrarr;": "\u27ff",
-    "eDDot;": "\u2a77",
-    "eDot;": "\u2251",
-    "eacute": "\xe9",
-    "eacute;": "\xe9",
-    "easter;": "\u2a6e",
-    "ecaron;": "\u011b",
-    "ecir;": "\u2256",
-    "ecirc": "\xea",
-    "ecirc;": "\xea",
-    "ecolon;": "\u2255",
-    "ecy;": "\u044d",
-    "edot;": "\u0117",
-    "ee;": "\u2147",
-    "efDot;": "\u2252",
-    "efr;": "\U0001d522",
-    "eg;": "\u2a9a",
-    "egrave": "\xe8",
-    "egrave;": "\xe8",
-    "egs;": "\u2a96",
-    "egsdot;": "\u2a98",
-    "el;": "\u2a99",
-    "elinters;": "\u23e7",
-    "ell;": "\u2113",
-    "els;": "\u2a95",
-    "elsdot;": "\u2a97",
-    "emacr;": "\u0113",
-    "empty;": "\u2205",
-    "emptyset;": "\u2205",
-    "emptyv;": "\u2205",
-    "emsp13;": "\u2004",
-    "emsp14;": "\u2005",
-    "emsp;": "\u2003",
-    "eng;": "\u014b",
-    "ensp;": "\u2002",
-    "eogon;": "\u0119",
-    "eopf;": "\U0001d556",
-    "epar;": "\u22d5",
-    "eparsl;": "\u29e3",
-    "eplus;": "\u2a71",
-    "epsi;": "\u03b5",
-    "epsilon;": "\u03b5",
-    "epsiv;": "\u03f5",
-    "eqcirc;": "\u2256",
-    "eqcolon;": "\u2255",
-    "eqsim;": "\u2242",
-    "eqslantgtr;": "\u2a96",
-    "eqslantless;": "\u2a95",
-    "equals;": "=",
-    "equest;": "\u225f",
-    "equiv;": "\u2261",
-    "equivDD;": "\u2a78",
-    "eqvparsl;": "\u29e5",
-    "erDot;": "\u2253",
-    "erarr;": "\u2971",
-    "escr;": "\u212f",
-    "esdot;": "\u2250",
-    "esim;": "\u2242",
-    "eta;": "\u03b7",
-    "eth": "\xf0",
-    "eth;": "\xf0",
-    "euml": "\xeb",
-    "euml;": "\xeb",
-    "euro;": "\u20ac",
-    "excl;": "!",
-    "exist;": "\u2203",
-    "expectation;": "\u2130",
-    "exponentiale;": "\u2147",
-    "fallingdotseq;": "\u2252",
-    "fcy;": "\u0444",
-    "female;": "\u2640",
-    "ffilig;": "\ufb03",
-    "fflig;": "\ufb00",
-    "ffllig;": "\ufb04",
-    "ffr;": "\U0001d523",
-    "filig;": "\ufb01",
-    "fjlig;": "fj",
-    "flat;": "\u266d",
-    "fllig;": "\ufb02",
-    "fltns;": "\u25b1",
-    "fnof;": "\u0192",
-    "fopf;": "\U0001d557",
-    "forall;": "\u2200",
-    "fork;": "\u22d4",
-    "forkv;": "\u2ad9",
-    "fpartint;": "\u2a0d",
-    "frac12": "\xbd",
-    "frac12;": "\xbd",
-    "frac13;": "\u2153",
-    "frac14": "\xbc",
-    "frac14;": "\xbc",
-    "frac15;": "\u2155",
-    "frac16;": "\u2159",
-    "frac18;": "\u215b",
-    "frac23;": "\u2154",
-    "frac25;": "\u2156",
-    "frac34": "\xbe",
-    "frac34;": "\xbe",
-    "frac35;": "\u2157",
-    "frac38;": "\u215c",
-    "frac45;": "\u2158",
-    "frac56;": "\u215a",
-    "frac58;": "\u215d",
-    "frac78;": "\u215e",
-    "frasl;": "\u2044",
-    "frown;": "\u2322",
-    "fscr;": "\U0001d4bb",
-    "gE;": "\u2267",
-    "gEl;": "\u2a8c",
-    "gacute;": "\u01f5",
-    "gamma;": "\u03b3",
-    "gammad;": "\u03dd",
-    "gap;": "\u2a86",
-    "gbreve;": "\u011f",
-    "gcirc;": "\u011d",
-    "gcy;": "\u0433",
-    "gdot;": "\u0121",
-    "ge;": "\u2265",
-    "gel;": "\u22db",
-    "geq;": "\u2265",
-    "geqq;": "\u2267",
-    "geqslant;": "\u2a7e",
-    "ges;": "\u2a7e",
-    "gescc;": "\u2aa9",
-    "gesdot;": "\u2a80",
-    "gesdoto;": "\u2a82",
-    "gesdotol;": "\u2a84",
-    "gesl;": "\u22db\ufe00",
-    "gesles;": "\u2a94",
-    "gfr;": "\U0001d524",
-    "gg;": "\u226b",
-    "ggg;": "\u22d9",
-    "gimel;": "\u2137",
-    "gjcy;": "\u0453",
-    "gl;": "\u2277",
-    "glE;": "\u2a92",
-    "gla;": "\u2aa5",
-    "glj;": "\u2aa4",
-    "gnE;": "\u2269",
-    "gnap;": "\u2a8a",
-    "gnapprox;": "\u2a8a",
-    "gne;": "\u2a88",
-    "gneq;": "\u2a88",
-    "gneqq;": "\u2269",
-    "gnsim;": "\u22e7",
-    "gopf;": "\U0001d558",
-    "grave;": "`",
-    "gscr;": "\u210a",
-    "gsim;": "\u2273",
-    "gsime;": "\u2a8e",
-    "gsiml;": "\u2a90",
-    "gt": ">",
-    "gt;": ">",
-    "gtcc;": "\u2aa7",
-    "gtcir;": "\u2a7a",
-    "gtdot;": "\u22d7",
-    "gtlPar;": "\u2995",
-    "gtquest;": "\u2a7c",
-    "gtrapprox;": "\u2a86",
-    "gtrarr;": "\u2978",
-    "gtrdot;": "\u22d7",
-    "gtreqless;": "\u22db",
-    "gtreqqless;": "\u2a8c",
-    "gtrless;": "\u2277",
-    "gtrsim;": "\u2273",
-    "gvertneqq;": "\u2269\ufe00",
-    "gvnE;": "\u2269\ufe00",
-    "hArr;": "\u21d4",
-    "hairsp;": "\u200a",
-    "half;": "\xbd",
-    "hamilt;": "\u210b",
-    "hardcy;": "\u044a",
-    "harr;": "\u2194",
-    "harrcir;": "\u2948",
-    "harrw;": "\u21ad",
-    "hbar;": "\u210f",
-    "hcirc;": "\u0125",
-    "hearts;": "\u2665",
-    "heartsuit;": "\u2665",
-    "hellip;": "\u2026",
-    "hercon;": "\u22b9",
-    "hfr;": "\U0001d525",
-    "hksearow;": "\u2925",
-    "hkswarow;": "\u2926",
-    "hoarr;": "\u21ff",
-    "homtht;": "\u223b",
-    "hookleftarrow;": "\u21a9",
-    "hookrightarrow;": "\u21aa",
-    "hopf;": "\U0001d559",
-    "horbar;": "\u2015",
-    "hscr;": "\U0001d4bd",
-    "hslash;": "\u210f",
-    "hstrok;": "\u0127",
-    "hybull;": "\u2043",
-    "hyphen;": "\u2010",
-    "iacute": "\xed",
-    "iacute;": "\xed",
-    "ic;": "\u2063",
-    "icirc": "\xee",
-    "icirc;": "\xee",
-    "icy;": "\u0438",
-    "iecy;": "\u0435",
-    "iexcl": "\xa1",
-    "iexcl;": "\xa1",
-    "iff;": "\u21d4",
-    "ifr;": "\U0001d526",
-    "igrave": "\xec",
-    "igrave;": "\xec",
-    "ii;": "\u2148",
-    "iiiint;": "\u2a0c",
-    "iiint;": "\u222d",
-    "iinfin;": "\u29dc",
-    "iiota;": "\u2129",
-    "ijlig;": "\u0133",
-    "imacr;": "\u012b",
-    "image;": "\u2111",
-    "imagline;": "\u2110",
-    "imagpart;": "\u2111",
-    "imath;": "\u0131",
-    "imof;": "\u22b7",
-    "imped;": "\u01b5",
-    "in;": "\u2208",
-    "incare;": "\u2105",
-    "infin;": "\u221e",
-    "infintie;": "\u29dd",
-    "inodot;": "\u0131",
-    "int;": "\u222b",
-    "intcal;": "\u22ba",
-    "integers;": "\u2124",
-    "intercal;": "\u22ba",
-    "intlarhk;": "\u2a17",
-    "intprod;": "\u2a3c",
-    "iocy;": "\u0451",
-    "iogon;": "\u012f",
-    "iopf;": "\U0001d55a",
-    "iota;": "\u03b9",
-    "iprod;": "\u2a3c",
-    "iquest": "\xbf",
-    "iquest;": "\xbf",
-    "iscr;": "\U0001d4be",
-    "isin;": "\u2208",
-    "isinE;": "\u22f9",
-    "isindot;": "\u22f5",
-    "isins;": "\u22f4",
-    "isinsv;": "\u22f3",
-    "isinv;": "\u2208",
-    "it;": "\u2062",
-    "itilde;": "\u0129",
-    "iukcy;": "\u0456",
-    "iuml": "\xef",
-    "iuml;": "\xef",
-    "jcirc;": "\u0135",
-    "jcy;": "\u0439",
-    "jfr;": "\U0001d527",
-    "jmath;": "\u0237",
-    "jopf;": "\U0001d55b",
-    "jscr;": "\U0001d4bf",
-    "jsercy;": "\u0458",
-    "jukcy;": "\u0454",
-    "kappa;": "\u03ba",
-    "kappav;": "\u03f0",
-    "kcedil;": "\u0137",
-    "kcy;": "\u043a",
-    "kfr;": "\U0001d528",
-    "kgreen;": "\u0138",
-    "khcy;": "\u0445",
-    "kjcy;": "\u045c",
-    "kopf;": "\U0001d55c",
-    "kscr;": "\U0001d4c0",
-    "lAarr;": "\u21da",
-    "lArr;": "\u21d0",
-    "lAtail;": "\u291b",
-    "lBarr;": "\u290e",
-    "lE;": "\u2266",
-    "lEg;": "\u2a8b",
-    "lHar;": "\u2962",
-    "lacute;": "\u013a",
-    "laemptyv;": "\u29b4",
-    "lagran;": "\u2112",
-    "lambda;": "\u03bb",
-    "lang;": "\u27e8",
-    "langd;": "\u2991",
-    "langle;": "\u27e8",
-    "lap;": "\u2a85",
-    "laquo": "\xab",
-    "laquo;": "\xab",
-    "larr;": "\u2190",
-    "larrb;": "\u21e4",
-    "larrbfs;": "\u291f",
-    "larrfs;": "\u291d",
-    "larrhk;": "\u21a9",
-    "larrlp;": "\u21ab",
-    "larrpl;": "\u2939",
-    "larrsim;": "\u2973",
-    "larrtl;": "\u21a2",
-    "lat;": "\u2aab",
-    "latail;": "\u2919",
-    "late;": "\u2aad",
-    "lates;": "\u2aad\ufe00",
-    "lbarr;": "\u290c",
-    "lbbrk;": "\u2772",
-    "lbrace;": "{",
-    "lbrack;": "[",
-    "lbrke;": "\u298b",
-    "lbrksld;": "\u298f",
-    "lbrkslu;": "\u298d",
-    "lcaron;": "\u013e",
-    "lcedil;": "\u013c",
-    "lceil;": "\u2308",
-    "lcub;": "{",
-    "lcy;": "\u043b",
-    "ldca;": "\u2936",
-    "ldquo;": "\u201c",
-    "ldquor;": "\u201e",
-    "ldrdhar;": "\u2967",
-    "ldrushar;": "\u294b",
-    "ldsh;": "\u21b2",
-    "le;": "\u2264",
-    "leftarrow;": "\u2190",
-    "leftarrowtail;": "\u21a2",
-    "leftharpoondown;": "\u21bd",
-    "leftharpoonup;": "\u21bc",
-    "leftleftarrows;": "\u21c7",
-    "leftrightarrow;": "\u2194",
-    "leftrightarrows;": "\u21c6",
-    "leftrightharpoons;": "\u21cb",
-    "leftrightsquigarrow;": "\u21ad",
-    "leftthreetimes;": "\u22cb",
-    "leg;": "\u22da",
-    "leq;": "\u2264",
-    "leqq;": "\u2266",
-    "leqslant;": "\u2a7d",
-    "les;": "\u2a7d",
-    "lescc;": "\u2aa8",
-    "lesdot;": "\u2a7f",
-    "lesdoto;": "\u2a81",
-    "lesdotor;": "\u2a83",
-    "lesg;": "\u22da\ufe00",
-    "lesges;": "\u2a93",
-    "lessapprox;": "\u2a85",
-    "lessdot;": "\u22d6",
-    "lesseqgtr;": "\u22da",
-    "lesseqqgtr;": "\u2a8b",
-    "lessgtr;": "\u2276",
-    "lesssim;": "\u2272",
-    "lfisht;": "\u297c",
-    "lfloor;": "\u230a",
-    "lfr;": "\U0001d529",
-    "lg;": "\u2276",
-    "lgE;": "\u2a91",
-    "lhard;": "\u21bd",
-    "lharu;": "\u21bc",
-    "lharul;": "\u296a",
-    "lhblk;": "\u2584",
-    "ljcy;": "\u0459",
-    "ll;": "\u226a",
-    "llarr;": "\u21c7",
-    "llcorner;": "\u231e",
-    "llhard;": "\u296b",
-    "lltri;": "\u25fa",
-    "lmidot;": "\u0140",
-    "lmoust;": "\u23b0",
-    "lmoustache;": "\u23b0",
-    "lnE;": "\u2268",
-    "lnap;": "\u2a89",
-    "lnapprox;": "\u2a89",
-    "lne;": "\u2a87",
-    "lneq;": "\u2a87",
-    "lneqq;": "\u2268",
-    "lnsim;": "\u22e6",
-    "loang;": "\u27ec",
-    "loarr;": "\u21fd",
-    "lobrk;": "\u27e6",
-    "longleftarrow;": "\u27f5",
-    "longleftrightarrow;": "\u27f7",
-    "longmapsto;": "\u27fc",
-    "longrightarrow;": "\u27f6",
-    "looparrowleft;": "\u21ab",
-    "looparrowright;": "\u21ac",
-    "lopar;": "\u2985",
-    "lopf;": "\U0001d55d",
-    "loplus;": "\u2a2d",
-    "lotimes;": "\u2a34",
-    "lowast;": "\u2217",
-    "lowbar;": "_",
-    "loz;": "\u25ca",
-    "lozenge;": "\u25ca",
-    "lozf;": "\u29eb",
-    "lpar;": "(",
-    "lparlt;": "\u2993",
-    "lrarr;": "\u21c6",
-    "lrcorner;": "\u231f",
-    "lrhar;": "\u21cb",
-    "lrhard;": "\u296d",
-    "lrm;": "\u200e",
-    "lrtri;": "\u22bf",
-    "lsaquo;": "\u2039",
-    "lscr;": "\U0001d4c1",
-    "lsh;": "\u21b0",
-    "lsim;": "\u2272",
-    "lsime;": "\u2a8d",
-    "lsimg;": "\u2a8f",
-    "lsqb;": "[",
-    "lsquo;": "\u2018",
-    "lsquor;": "\u201a",
-    "lstrok;": "\u0142",
-    "lt": "<",
-    "lt;": "<",
-    "ltcc;": "\u2aa6",
-    "ltcir;": "\u2a79",
-    "ltdot;": "\u22d6",
-    "lthree;": "\u22cb",
-    "ltimes;": "\u22c9",
-    "ltlarr;": "\u2976",
-    "ltquest;": "\u2a7b",
-    "ltrPar;": "\u2996",
-    "ltri;": "\u25c3",
-    "ltrie;": "\u22b4",
-    "ltrif;": "\u25c2",
-    "lurdshar;": "\u294a",
-    "luruhar;": "\u2966",
-    "lvertneqq;": "\u2268\ufe00",
-    "lvnE;": "\u2268\ufe00",
-    "mDDot;": "\u223a",
-    "macr": "\xaf",
-    "macr;": "\xaf",
-    "male;": "\u2642",
-    "malt;": "\u2720",
-    "maltese;": "\u2720",
-    "map;": "\u21a6",
-    "mapsto;": "\u21a6",
-    "mapstodown;": "\u21a7",
-    "mapstoleft;": "\u21a4",
-    "mapstoup;": "\u21a5",
-    "marker;": "\u25ae",
-    "mcomma;": "\u2a29",
-    "mcy;": "\u043c",
-    "mdash;": "\u2014",
-    "measuredangle;": "\u2221",
-    "mfr;": "\U0001d52a",
-    "mho;": "\u2127",
-    "micro": "\xb5",
-    "micro;": "\xb5",
-    "mid;": "\u2223",
-    "midast;": "*",
-    "midcir;": "\u2af0",
-    "middot": "\xb7",
-    "middot;": "\xb7",
-    "minus;": "\u2212",
-    "minusb;": "\u229f",
-    "minusd;": "\u2238",
-    "minusdu;": "\u2a2a",
-    "mlcp;": "\u2adb",
-    "mldr;": "\u2026",
-    "mnplus;": "\u2213",
-    "models;": "\u22a7",
-    "mopf;": "\U0001d55e",
-    "mp;": "\u2213",
-    "mscr;": "\U0001d4c2",
-    "mstpos;": "\u223e",
-    "mu;": "\u03bc",
-    "multimap;": "\u22b8",
-    "mumap;": "\u22b8",
-    "nGg;": "\u22d9\u0338",
-    "nGt;": "\u226b\u20d2",
-    "nGtv;": "\u226b\u0338",
-    "nLeftarrow;": "\u21cd",
-    "nLeftrightarrow;": "\u21ce",
-    "nLl;": "\u22d8\u0338",
-    "nLt;": "\u226a\u20d2",
-    "nLtv;": "\u226a\u0338",
-    "nRightarrow;": "\u21cf",
-    "nVDash;": "\u22af",
-    "nVdash;": "\u22ae",
-    "nabla;": "\u2207",
-    "nacute;": "\u0144",
-    "nang;": "\u2220\u20d2",
-    "nap;": "\u2249",
-    "napE;": "\u2a70\u0338",
-    "napid;": "\u224b\u0338",
-    "napos;": "\u0149",
-    "napprox;": "\u2249",
-    "natur;": "\u266e",
-    "natural;": "\u266e",
-    "naturals;": "\u2115",
-    "nbsp": "\xa0",
-    "nbsp;": "\xa0",
-    "nbump;": "\u224e\u0338",
-    "nbumpe;": "\u224f\u0338",
-    "ncap;": "\u2a43",
-    "ncaron;": "\u0148",
-    "ncedil;": "\u0146",
-    "ncong;": "\u2247",
-    "ncongdot;": "\u2a6d\u0338",
-    "ncup;": "\u2a42",
-    "ncy;": "\u043d",
-    "ndash;": "\u2013",
-    "ne;": "\u2260",
-    "neArr;": "\u21d7",
-    "nearhk;": "\u2924",
-    "nearr;": "\u2197",
-    "nearrow;": "\u2197",
-    "nedot;": "\u2250\u0338",
-    "nequiv;": "\u2262",
-    "nesear;": "\u2928",
-    "nesim;": "\u2242\u0338",
-    "nexist;": "\u2204",
-    "nexists;": "\u2204",
-    "nfr;": "\U0001d52b",
-    "ngE;": "\u2267\u0338",
-    "nge;": "\u2271",
-    "ngeq;": "\u2271",
-    "ngeqq;": "\u2267\u0338",
-    "ngeqslant;": "\u2a7e\u0338",
-    "nges;": "\u2a7e\u0338",
-    "ngsim;": "\u2275",
-    "ngt;": "\u226f",
-    "ngtr;": "\u226f",
-    "nhArr;": "\u21ce",
-    "nharr;": "\u21ae",
-    "nhpar;": "\u2af2",
-    "ni;": "\u220b",
-    "nis;": "\u22fc",
-    "nisd;": "\u22fa",
-    "niv;": "\u220b",
-    "njcy;": "\u045a",
-    "nlArr;": "\u21cd",
-    "nlE;": "\u2266\u0338",
-    "nlarr;": "\u219a",
-    "nldr;": "\u2025",
-    "nle;": "\u2270",
-    "nleftarrow;": "\u219a",
-    "nleftrightarrow;": "\u21ae",
-    "nleq;": "\u2270",
-    "nleqq;": "\u2266\u0338",
-    "nleqslant;": "\u2a7d\u0338",
-    "nles;": "\u2a7d\u0338",
-    "nless;": "\u226e",
-    "nlsim;": "\u2274",
-    "nlt;": "\u226e",
-    "nltri;": "\u22ea",
-    "nltrie;": "\u22ec",
-    "nmid;": "\u2224",
-    "nopf;": "\U0001d55f",
-    "not": "\xac",
-    "not;": "\xac",
-    "notin;": "\u2209",
-    "notinE;": "\u22f9\u0338",
-    "notindot;": "\u22f5\u0338",
-    "notinva;": "\u2209",
-    "notinvb;": "\u22f7",
-    "notinvc;": "\u22f6",
-    "notni;": "\u220c",
-    "notniva;": "\u220c",
-    "notnivb;": "\u22fe",
-    "notnivc;": "\u22fd",
-    "npar;": "\u2226",
-    "nparallel;": "\u2226",
-    "nparsl;": "\u2afd\u20e5",
-    "npart;": "\u2202\u0338",
-    "npolint;": "\u2a14",
-    "npr;": "\u2280",
-    "nprcue;": "\u22e0",
-    "npre;": "\u2aaf\u0338",
-    "nprec;": "\u2280",
-    "npreceq;": "\u2aaf\u0338",
-    "nrArr;": "\u21cf",
-    "nrarr;": "\u219b",
-    "nrarrc;": "\u2933\u0338",
-    "nrarrw;": "\u219d\u0338",
-    "nrightarrow;": "\u219b",
-    "nrtri;": "\u22eb",
-    "nrtrie;": "\u22ed",
-    "nsc;": "\u2281",
-    "nsccue;": "\u22e1",
-    "nsce;": "\u2ab0\u0338",
-    "nscr;": "\U0001d4c3",
-    "nshortmid;": "\u2224",
-    "nshortparallel;": "\u2226",
-    "nsim;": "\u2241",
-    "nsime;": "\u2244",
-    "nsimeq;": "\u2244",
-    "nsmid;": "\u2224",
-    "nspar;": "\u2226",
-    "nsqsube;": "\u22e2",
-    "nsqsupe;": "\u22e3",
-    "nsub;": "\u2284",
-    "nsubE;": "\u2ac5\u0338",
-    "nsube;": "\u2288",
-    "nsubset;": "\u2282\u20d2",
-    "nsubseteq;": "\u2288",
-    "nsubseteqq;": "\u2ac5\u0338",
-    "nsucc;": "\u2281",
-    "nsucceq;": "\u2ab0\u0338",
-    "nsup;": "\u2285",
-    "nsupE;": "\u2ac6\u0338",
-    "nsupe;": "\u2289",
-    "nsupset;": "\u2283\u20d2",
-    "nsupseteq;": "\u2289",
-    "nsupseteqq;": "\u2ac6\u0338",
-    "ntgl;": "\u2279",
-    "ntilde": "\xf1",
-    "ntilde;": "\xf1",
-    "ntlg;": "\u2278",
-    "ntriangleleft;": "\u22ea",
-    "ntrianglelefteq;": "\u22ec",
-    "ntriangleright;": "\u22eb",
-    "ntrianglerighteq;": "\u22ed",
-    "nu;": "\u03bd",
-    "num;": "#",
-    "numero;": "\u2116",
-    "numsp;": "\u2007",
-    "nvDash;": "\u22ad",
-    "nvHarr;": "\u2904",
-    "nvap;": "\u224d\u20d2",
-    "nvdash;": "\u22ac",
-    "nvge;": "\u2265\u20d2",
-    "nvgt;": ">\u20d2",
-    "nvinfin;": "\u29de",
-    "nvlArr;": "\u2902",
-    "nvle;": "\u2264\u20d2",
-    "nvlt;": "<\u20d2",
-    "nvltrie;": "\u22b4\u20d2",
-    "nvrArr;": "\u2903",
-    "nvrtrie;": "\u22b5\u20d2",
-    "nvsim;": "\u223c\u20d2",
-    "nwArr;": "\u21d6",
-    "nwarhk;": "\u2923",
-    "nwarr;": "\u2196",
-    "nwarrow;": "\u2196",
-    "nwnear;": "\u2927",
-    "oS;": "\u24c8",
-    "oacute": "\xf3",
-    "oacute;": "\xf3",
-    "oast;": "\u229b",
-    "ocir;": "\u229a",
-    "ocirc": "\xf4",
-    "ocirc;": "\xf4",
-    "ocy;": "\u043e",
-    "odash;": "\u229d",
-    "odblac;": "\u0151",
-    "odiv;": "\u2a38",
-    "odot;": "\u2299",
-    "odsold;": "\u29bc",
-    "oelig;": "\u0153",
-    "ofcir;": "\u29bf",
-    "ofr;": "\U0001d52c",
-    "ogon;": "\u02db",
-    "ograve": "\xf2",
-    "ograve;": "\xf2",
-    "ogt;": "\u29c1",
-    "ohbar;": "\u29b5",
-    "ohm;": "\u03a9",
-    "oint;": "\u222e",
-    "olarr;": "\u21ba",
-    "olcir;": "\u29be",
-    "olcross;": "\u29bb",
-    "oline;": "\u203e",
-    "olt;": "\u29c0",
-    "omacr;": "\u014d",
-    "omega;": "\u03c9",
-    "omicron;": "\u03bf",
-    "omid;": "\u29b6",
-    "ominus;": "\u2296",
-    "oopf;": "\U0001d560",
-    "opar;": "\u29b7",
-    "operp;": "\u29b9",
-    "oplus;": "\u2295",
-    "or;": "\u2228",
-    "orarr;": "\u21bb",
-    "ord;": "\u2a5d",
-    "order;": "\u2134",
-    "orderof;": "\u2134",
-    "ordf": "\xaa",
-    "ordf;": "\xaa",
-    "ordm": "\xba",
-    "ordm;": "\xba",
-    "origof;": "\u22b6",
-    "oror;": "\u2a56",
-    "orslope;": "\u2a57",
-    "orv;": "\u2a5b",
-    "oscr;": "\u2134",
-    "oslash": "\xf8",
-    "oslash;": "\xf8",
-    "osol;": "\u2298",
-    "otilde": "\xf5",
-    "otilde;": "\xf5",
-    "otimes;": "\u2297",
-    "otimesas;": "\u2a36",
-    "ouml": "\xf6",
-    "ouml;": "\xf6",
-    "ovbar;": "\u233d",
-    "par;": "\u2225",
-    "para": "\xb6",
-    "para;": "\xb6",
-    "parallel;": "\u2225",
-    "parsim;": "\u2af3",
-    "parsl;": "\u2afd",
-    "part;": "\u2202",
-    "pcy;": "\u043f",
-    "percnt;": "%",
-    "period;": ".",
-    "permil;": "\u2030",
-    "perp;": "\u22a5",
-    "pertenk;": "\u2031",
-    "pfr;": "\U0001d52d",
-    "phi;": "\u03c6",
-    "phiv;": "\u03d5",
-    "phmmat;": "\u2133",
-    "phone;": "\u260e",
-    "pi;": "\u03c0",
-    "pitchfork;": "\u22d4",
-    "piv;": "\u03d6",
-    "planck;": "\u210f",
-    "planckh;": "\u210e",
-    "plankv;": "\u210f",
-    "plus;": "+",
-    "plusacir;": "\u2a23",
-    "plusb;": "\u229e",
-    "pluscir;": "\u2a22",
-    "plusdo;": "\u2214",
-    "plusdu;": "\u2a25",
-    "pluse;": "\u2a72",
-    "plusmn": "\xb1",
-    "plusmn;": "\xb1",
-    "plussim;": "\u2a26",
-    "plustwo;": "\u2a27",
-    "pm;": "\xb1",
-    "pointint;": "\u2a15",
-    "popf;": "\U0001d561",
-    "pound": "\xa3",
-    "pound;": "\xa3",
-    "pr;": "\u227a",
-    "prE;": "\u2ab3",
-    "prap;": "\u2ab7",
-    "prcue;": "\u227c",
-    "pre;": "\u2aaf",
-    "prec;": "\u227a",
-    "precapprox;": "\u2ab7",
-    "preccurlyeq;": "\u227c",
-    "preceq;": "\u2aaf",
-    "precnapprox;": "\u2ab9",
-    "precneqq;": "\u2ab5",
-    "precnsim;": "\u22e8",
-    "precsim;": "\u227e",
-    "prime;": "\u2032",
-    "primes;": "\u2119",
-    "prnE;": "\u2ab5",
-    "prnap;": "\u2ab9",
-    "prnsim;": "\u22e8",
-    "prod;": "\u220f",
-    "profalar;": "\u232e",
-    "profline;": "\u2312",
-    "profsurf;": "\u2313",
-    "prop;": "\u221d",
-    "propto;": "\u221d",
-    "prsim;": "\u227e",
-    "prurel;": "\u22b0",
-    "pscr;": "\U0001d4c5",
-    "psi;": "\u03c8",
-    "puncsp;": "\u2008",
-    "qfr;": "\U0001d52e",
-    "qint;": "\u2a0c",
-    "qopf;": "\U0001d562",
-    "qprime;": "\u2057",
-    "qscr;": "\U0001d4c6",
-    "quaternions;": "\u210d",
-    "quatint;": "\u2a16",
-    "quest;": "?",
-    "questeq;": "\u225f",
-    "quot": "\"",
-    "quot;": "\"",
-    "rAarr;": "\u21db",
-    "rArr;": "\u21d2",
-    "rAtail;": "\u291c",
-    "rBarr;": "\u290f",
-    "rHar;": "\u2964",
-    "race;": "\u223d\u0331",
-    "racute;": "\u0155",
-    "radic;": "\u221a",
-    "raemptyv;": "\u29b3",
-    "rang;": "\u27e9",
-    "rangd;": "\u2992",
-    "range;": "\u29a5",
-    "rangle;": "\u27e9",
-    "raquo": "\xbb",
-    "raquo;": "\xbb",
-    "rarr;": "\u2192",
-    "rarrap;": "\u2975",
-    "rarrb;": "\u21e5",
-    "rarrbfs;": "\u2920",
-    "rarrc;": "\u2933",
-    "rarrfs;": "\u291e",
-    "rarrhk;": "\u21aa",
-    "rarrlp;": "\u21ac",
-    "rarrpl;": "\u2945",
-    "rarrsim;": "\u2974",
-    "rarrtl;": "\u21a3",
-    "rarrw;": "\u219d",
-    "ratail;": "\u291a",
-    "ratio;": "\u2236",
-    "rationals;": "\u211a",
-    "rbarr;": "\u290d",
-    "rbbrk;": "\u2773",
-    "rbrace;": "}",
-    "rbrack;": "]",
-    "rbrke;": "\u298c",
-    "rbrksld;": "\u298e",
-    "rbrkslu;": "\u2990",
-    "rcaron;": "\u0159",
-    "rcedil;": "\u0157",
-    "rceil;": "\u2309",
-    "rcub;": "}",
-    "rcy;": "\u0440",
-    "rdca;": "\u2937",
-    "rdldhar;": "\u2969",
-    "rdquo;": "\u201d",
-    "rdquor;": "\u201d",
-    "rdsh;": "\u21b3",
-    "real;": "\u211c",
-    "realine;": "\u211b",
-    "realpart;": "\u211c",
-    "reals;": "\u211d",
-    "rect;": "\u25ad",
-    "reg": "\xae",
-    "reg;": "\xae",
-    "rfisht;": "\u297d",
-    "rfloor;": "\u230b",
-    "rfr;": "\U0001d52f",
-    "rhard;": "\u21c1",
-    "rharu;": "\u21c0",
-    "rharul;": "\u296c",
-    "rho;": "\u03c1",
-    "rhov;": "\u03f1",
-    "rightarrow;": "\u2192",
-    "rightarrowtail;": "\u21a3",
-    "rightharpoondown;": "\u21c1",
-    "rightharpoonup;": "\u21c0",
-    "rightleftarrows;": "\u21c4",
-    "rightleftharpoons;": "\u21cc",
-    "rightrightarrows;": "\u21c9",
-    "rightsquigarrow;": "\u219d",
-    "rightthreetimes;": "\u22cc",
-    "ring;": "\u02da",
-    "risingdotseq;": "\u2253",
-    "rlarr;": "\u21c4",
-    "rlhar;": "\u21cc",
-    "rlm;": "\u200f",
-    "rmoust;": "\u23b1",
-    "rmoustache;": "\u23b1",
-    "rnmid;": "\u2aee",
-    "roang;": "\u27ed",
-    "roarr;": "\u21fe",
-    "robrk;": "\u27e7",
-    "ropar;": "\u2986",
-    "ropf;": "\U0001d563",
-    "roplus;": "\u2a2e",
-    "rotimes;": "\u2a35",
-    "rpar;": ")",
-    "rpargt;": "\u2994",
-    "rppolint;": "\u2a12",
-    "rrarr;": "\u21c9",
-    "rsaquo;": "\u203a",
-    "rscr;": "\U0001d4c7",
-    "rsh;": "\u21b1",
-    "rsqb;": "]",
-    "rsquo;": "\u2019",
-    "rsquor;": "\u2019",
-    "rthree;": "\u22cc",
-    "rtimes;": "\u22ca",
-    "rtri;": "\u25b9",
-    "rtrie;": "\u22b5",
-    "rtrif;": "\u25b8",
-    "rtriltri;": "\u29ce",
-    "ruluhar;": "\u2968",
-    "rx;": "\u211e",
-    "sacute;": "\u015b",
-    "sbquo;": "\u201a",
-    "sc;": "\u227b",
-    "scE;": "\u2ab4",
-    "scap;": "\u2ab8",
-    "scaron;": "\u0161",
-    "sccue;": "\u227d",
-    "sce;": "\u2ab0",
-    "scedil;": "\u015f",
-    "scirc;": "\u015d",
-    "scnE;": "\u2ab6",
-    "scnap;": "\u2aba",
-    "scnsim;": "\u22e9",
-    "scpolint;": "\u2a13",
-    "scsim;": "\u227f",
-    "scy;": "\u0441",
-    "sdot;": "\u22c5",
-    "sdotb;": "\u22a1",
-    "sdote;": "\u2a66",
-    "seArr;": "\u21d8",
-    "searhk;": "\u2925",
-    "searr;": "\u2198",
-    "searrow;": "\u2198",
-    "sect": "\xa7",
-    "sect;": "\xa7",
-    "semi;": ";",
-    "seswar;": "\u2929",
-    "setminus;": "\u2216",
-    "setmn;": "\u2216",
-    "sext;": "\u2736",
-    "sfr;": "\U0001d530",
-    "sfrown;": "\u2322",
-    "sharp;": "\u266f",
-    "shchcy;": "\u0449",
-    "shcy;": "\u0448",
-    "shortmid;": "\u2223",
-    "shortparallel;": "\u2225",
-    "shy": "\xad",
-    "shy;": "\xad",
-    "sigma;": "\u03c3",
-    "sigmaf;": "\u03c2",
-    "sigmav;": "\u03c2",
-    "sim;": "\u223c",
-    "simdot;": "\u2a6a",
-    "sime;": "\u2243",
-    "simeq;": "\u2243",
-    "simg;": "\u2a9e",
-    "simgE;": "\u2aa0",
-    "siml;": "\u2a9d",
-    "simlE;": "\u2a9f",
-    "simne;": "\u2246",
-    "simplus;": "\u2a24",
-    "simrarr;": "\u2972",
-    "slarr;": "\u2190",
-    "smallsetminus;": "\u2216",
-    "smashp;": "\u2a33",
-    "smeparsl;": "\u29e4",
-    "smid;": "\u2223",
-    "smile;": "\u2323",
-    "smt;": "\u2aaa",
-    "smte;": "\u2aac",
-    "smtes;": "\u2aac\ufe00",
-    "softcy;": "\u044c",
-    "sol;": "/",
-    "solb;": "\u29c4",
-    "solbar;": "\u233f",
-    "sopf;": "\U0001d564",
-    "spades;": "\u2660",
-    "spadesuit;": "\u2660",
-    "spar;": "\u2225",
-    "sqcap;": "\u2293",
-    "sqcaps;": "\u2293\ufe00",
-    "sqcup;": "\u2294",
-    "sqcups;": "\u2294\ufe00",
-    "sqsub;": "\u228f",
-    "sqsube;": "\u2291",
-    "sqsubset;": "\u228f",
-    "sqsubseteq;": "\u2291",
-    "sqsup;": "\u2290",
-    "sqsupe;": "\u2292",
-    "sqsupset;": "\u2290",
-    "sqsupseteq;": "\u2292",
-    "squ;": "\u25a1",
-    "square;": "\u25a1",
-    "squarf;": "\u25aa",
-    "squf;": "\u25aa",
-    "srarr;": "\u2192",
-    "sscr;": "\U0001d4c8",
-    "ssetmn;": "\u2216",
-    "ssmile;": "\u2323",
-    "sstarf;": "\u22c6",
-    "star;": "\u2606",
-    "starf;": "\u2605",
-    "straightepsilon;": "\u03f5",
-    "straightphi;": "\u03d5",
-    "strns;": "\xaf",
-    "sub;": "\u2282",
-    "subE;": "\u2ac5",
-    "subdot;": "\u2abd",
-    "sube;": "\u2286",
-    "subedot;": "\u2ac3",
-    "submult;": "\u2ac1",
-    "subnE;": "\u2acb",
-    "subne;": "\u228a",
-    "subplus;": "\u2abf",
-    "subrarr;": "\u2979",
-    "subset;": "\u2282",
-    "subseteq;": "\u2286",
-    "subseteqq;": "\u2ac5",
-    "subsetneq;": "\u228a",
-    "subsetneqq;": "\u2acb",
-    "subsim;": "\u2ac7",
-    "subsub;": "\u2ad5",
-    "subsup;": "\u2ad3",
-    "succ;": "\u227b",
-    "succapprox;": "\u2ab8",
-    "succcurlyeq;": "\u227d",
-    "succeq;": "\u2ab0",
-    "succnapprox;": "\u2aba",
-    "succneqq;": "\u2ab6",
-    "succnsim;": "\u22e9",
-    "succsim;": "\u227f",
-    "sum;": "\u2211",
-    "sung;": "\u266a",
-    "sup1": "\xb9",
-    "sup1;": "\xb9",
-    "sup2": "\xb2",
-    "sup2;": "\xb2",
-    "sup3": "\xb3",
-    "sup3;": "\xb3",
-    "sup;": "\u2283",
-    "supE;": "\u2ac6",
-    "supdot;": "\u2abe",
-    "supdsub;": "\u2ad8",
-    "supe;": "\u2287",
-    "supedot;": "\u2ac4",
-    "suphsol;": "\u27c9",
-    "suphsub;": "\u2ad7",
-    "suplarr;": "\u297b",
-    "supmult;": "\u2ac2",
-    "supnE;": "\u2acc",
-    "supne;": "\u228b",
-    "supplus;": "\u2ac0",
-    "supset;": "\u2283",
-    "supseteq;": "\u2287",
-    "supseteqq;": "\u2ac6",
-    "supsetneq;": "\u228b",
-    "supsetneqq;": "\u2acc",
-    "supsim;": "\u2ac8",
-    "supsub;": "\u2ad4",
-    "supsup;": "\u2ad6",
-    "swArr;": "\u21d9",
-    "swarhk;": "\u2926",
-    "swarr;": "\u2199",
-    "swarrow;": "\u2199",
-    "swnwar;": "\u292a",
-    "szlig": "\xdf",
-    "szlig;": "\xdf",
-    "target;": "\u2316",
-    "tau;": "\u03c4",
-    "tbrk;": "\u23b4",
-    "tcaron;": "\u0165",
-    "tcedil;": "\u0163",
-    "tcy;": "\u0442",
-    "tdot;": "\u20db",
-    "telrec;": "\u2315",
-    "tfr;": "\U0001d531",
-    "there4;": "\u2234",
-    "therefore;": "\u2234",
-    "theta;": "\u03b8",
-    "thetasym;": "\u03d1",
-    "thetav;": "\u03d1",
-    "thickapprox;": "\u2248",
-    "thicksim;": "\u223c",
-    "thinsp;": "\u2009",
-    "thkap;": "\u2248",
-    "thksim;": "\u223c",
-    "thorn": "\xfe",
-    "thorn;": "\xfe",
-    "tilde;": "\u02dc",
-    "times": "\xd7",
-    "times;": "\xd7",
-    "timesb;": "\u22a0",
-    "timesbar;": "\u2a31",
-    "timesd;": "\u2a30",
-    "tint;": "\u222d",
-    "toea;": "\u2928",
-    "top;": "\u22a4",
-    "topbot;": "\u2336",
-    "topcir;": "\u2af1",
-    "topf;": "\U0001d565",
-    "topfork;": "\u2ada",
-    "tosa;": "\u2929",
-    "tprime;": "\u2034",
-    "trade;": "\u2122",
-    "triangle;": "\u25b5",
-    "triangledown;": "\u25bf",
-    "triangleleft;": "\u25c3",
-    "trianglelefteq;": "\u22b4",
-    "triangleq;": "\u225c",
-    "triangleright;": "\u25b9",
-    "trianglerighteq;": "\u22b5",
-    "tridot;": "\u25ec",
-    "trie;": "\u225c",
-    "triminus;": "\u2a3a",
-    "triplus;": "\u2a39",
-    "trisb;": "\u29cd",
-    "tritime;": "\u2a3b",
-    "trpezium;": "\u23e2",
-    "tscr;": "\U0001d4c9",
-    "tscy;": "\u0446",
-    "tshcy;": "\u045b",
-    "tstrok;": "\u0167",
-    "twixt;": "\u226c",
-    "twoheadleftarrow;": "\u219e",
-    "twoheadrightarrow;": "\u21a0",
-    "uArr;": "\u21d1",
-    "uHar;": "\u2963",
-    "uacute": "\xfa",
-    "uacute;": "\xfa",
-    "uarr;": "\u2191",
-    "ubrcy;": "\u045e",
-    "ubreve;": "\u016d",
-    "ucirc": "\xfb",
-    "ucirc;": "\xfb",
-    "ucy;": "\u0443",
-    "udarr;": "\u21c5",
-    "udblac;": "\u0171",
-    "udhar;": "\u296e",
-    "ufisht;": "\u297e",
-    "ufr;": "\U0001d532",
-    "ugrave": "\xf9",
-    "ugrave;": "\xf9",
-    "uharl;": "\u21bf",
-    "uharr;": "\u21be",
-    "uhblk;": "\u2580",
-    "ulcorn;": "\u231c",
-    "ulcorner;": "\u231c",
-    "ulcrop;": "\u230f",
-    "ultri;": "\u25f8",
-    "umacr;": "\u016b",
-    "uml": "\xa8",
-    "uml;": "\xa8",
-    "uogon;": "\u0173",
-    "uopf;": "\U0001d566",
-    "uparrow;": "\u2191",
-    "updownarrow;": "\u2195",
-    "upharpoonleft;": "\u21bf",
-    "upharpoonright;": "\u21be",
-    "uplus;": "\u228e",
-    "upsi;": "\u03c5",
-    "upsih;": "\u03d2",
-    "upsilon;": "\u03c5",
-    "upuparrows;": "\u21c8",
-    "urcorn;": "\u231d",
-    "urcorner;": "\u231d",
-    "urcrop;": "\u230e",
-    "uring;": "\u016f",
-    "urtri;": "\u25f9",
-    "uscr;": "\U0001d4ca",
-    "utdot;": "\u22f0",
-    "utilde;": "\u0169",
-    "utri;": "\u25b5",
-    "utrif;": "\u25b4",
-    "uuarr;": "\u21c8",
-    "uuml": "\xfc",
-    "uuml;": "\xfc",
-    "uwangle;": "\u29a7",
-    "vArr;": "\u21d5",
-    "vBar;": "\u2ae8",
-    "vBarv;": "\u2ae9",
-    "vDash;": "\u22a8",
-    "vangrt;": "\u299c",
-    "varepsilon;": "\u03f5",
-    "varkappa;": "\u03f0",
-    "varnothing;": "\u2205",
-    "varphi;": "\u03d5",
-    "varpi;": "\u03d6",
-    "varpropto;": "\u221d",
-    "varr;": "\u2195",
-    "varrho;": "\u03f1",
-    "varsigma;": "\u03c2",
-    "varsubsetneq;": "\u228a\ufe00",
-    "varsubsetneqq;": "\u2acb\ufe00",
-    "varsupsetneq;": "\u228b\ufe00",
-    "varsupsetneqq;": "\u2acc\ufe00",
-    "vartheta;": "\u03d1",
-    "vartriangleleft;": "\u22b2",
-    "vartriangleright;": "\u22b3",
-    "vcy;": "\u0432",
-    "vdash;": "\u22a2",
-    "vee;": "\u2228",
-    "veebar;": "\u22bb",
-    "veeeq;": "\u225a",
-    "vellip;": "\u22ee",
-    "verbar;": "|",
-    "vert;": "|",
-    "vfr;": "\U0001d533",
-    "vltri;": "\u22b2",
-    "vnsub;": "\u2282\u20d2",
-    "vnsup;": "\u2283\u20d2",
-    "vopf;": "\U0001d567",
-    "vprop;": "\u221d",
-    "vrtri;": "\u22b3",
-    "vscr;": "\U0001d4cb",
-    "vsubnE;": "\u2acb\ufe00",
-    "vsubne;": "\u228a\ufe00",
-    "vsupnE;": "\u2acc\ufe00",
-    "vsupne;": "\u228b\ufe00",
-    "vzigzag;": "\u299a",
-    "wcirc;": "\u0175",
-    "wedbar;": "\u2a5f",
-    "wedge;": "\u2227",
-    "wedgeq;": "\u2259",
-    "weierp;": "\u2118",
-    "wfr;": "\U0001d534",
-    "wopf;": "\U0001d568",
-    "wp;": "\u2118",
-    "wr;": "\u2240",
-    "wreath;": "\u2240",
-    "wscr;": "\U0001d4cc",
-    "xcap;": "\u22c2",
-    "xcirc;": "\u25ef",
-    "xcup;": "\u22c3",
-    "xdtri;": "\u25bd",
-    "xfr;": "\U0001d535",
-    "xhArr;": "\u27fa",
-    "xharr;": "\u27f7",
-    "xi;": "\u03be",
-    "xlArr;": "\u27f8",
-    "xlarr;": "\u27f5",
-    "xmap;": "\u27fc",
-    "xnis;": "\u22fb",
-    "xodot;": "\u2a00",
-    "xopf;": "\U0001d569",
-    "xoplus;": "\u2a01",
-    "xotime;": "\u2a02",
-    "xrArr;": "\u27f9",
-    "xrarr;": "\u27f6",
-    "xscr;": "\U0001d4cd",
-    "xsqcup;": "\u2a06",
-    "xuplus;": "\u2a04",
-    "xutri;": "\u25b3",
-    "xvee;": "\u22c1",
-    "xwedge;": "\u22c0",
-    "yacute": "\xfd",
-    "yacute;": "\xfd",
-    "yacy;": "\u044f",
-    "ycirc;": "\u0177",
-    "ycy;": "\u044b",
-    "yen": "\xa5",
-    "yen;": "\xa5",
-    "yfr;": "\U0001d536",
-    "yicy;": "\u0457",
-    "yopf;": "\U0001d56a",
-    "yscr;": "\U0001d4ce",
-    "yucy;": "\u044e",
-    "yuml": "\xff",
-    "yuml;": "\xff",
-    "zacute;": "\u017a",
-    "zcaron;": "\u017e",
-    "zcy;": "\u0437",
-    "zdot;": "\u017c",
-    "zeetrf;": "\u2128",
-    "zeta;": "\u03b6",
-    "zfr;": "\U0001d537",
-    "zhcy;": "\u0436",
-    "zigrarr;": "\u21dd",
-    "zopf;": "\U0001d56b",
-    "zscr;": "\U0001d4cf",
-    "zwj;": "\u200d",
-    "zwnj;": "\u200c",
-}
-
-replacementCharacters = {
-    0x0: "\uFFFD",
-    0x0d: "\u000D",
-    0x80: "\u20AC",
-    0x81: "\u0081",
-    0x82: "\u201A",
-    0x83: "\u0192",
-    0x84: "\u201E",
-    0x85: "\u2026",
-    0x86: "\u2020",
-    0x87: "\u2021",
-    0x88: "\u02C6",
-    0x89: "\u2030",
-    0x8A: "\u0160",
-    0x8B: "\u2039",
-    0x8C: "\u0152",
-    0x8D: "\u008D",
-    0x8E: "\u017D",
-    0x8F: "\u008F",
-    0x90: "\u0090",
-    0x91: "\u2018",
-    0x92: "\u2019",
-    0x93: "\u201C",
-    0x94: "\u201D",
-    0x95: "\u2022",
-    0x96: "\u2013",
-    0x97: "\u2014",
-    0x98: "\u02DC",
-    0x99: "\u2122",
-    0x9A: "\u0161",
-    0x9B: "\u203A",
-    0x9C: "\u0153",
-    0x9D: "\u009D",
-    0x9E: "\u017E",
-    0x9F: "\u0178",
-}
-
-tokenTypes = {
-    "Doctype": 0,
-    "Characters": 1,
-    "SpaceCharacters": 2,
-    "StartTag": 3,
-    "EndTag": 4,
-    "EmptyTag": 5,
-    "Comment": 6,
-    "ParseError": 7
-}
-
-tagTokenTypes = frozenset([tokenTypes["StartTag"], tokenTypes["EndTag"],
-                           tokenTypes["EmptyTag"]])
-
-
-prefixes = dict([(v, k) for k, v in namespaces.items()])
-prefixes["http://www.w3.org/1998/Math/MathML"] = "math"
-
-
-class DataLossWarning(UserWarning):
-    """Raised when the current tree is unable to represent the input data"""
-    pass
-
-
-class _ReparseException(Exception):
-    pass
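
The tables deleted above (named character references, windows-1252 replacement characters, token type codes) are plain module-level constants in html5lib's constants module. A brief sketch of how downstream code consults them, assuming the standalone html5lib distribution rather than pip's vendored copy:

    from html5lib.constants import entities, replacementCharacters, tokenTypes, tagTokenTypes

    # Named character references map to their Unicode expansions; a legacy
    # subset (e.g. "yen") is also present without the trailing semicolon.
    assert entities["yen;"] == "\xa5"
    assert entities["yen"] == "\xa5"

    # Numeric references in the 0x80-0x9F range are remapped to the
    # windows-1252 characters that browsers historically substituted.
    assert replacementCharacters[0x99] == "\u2122"

    # Token types are small integers; tag tokens are grouped in a frozenset.
    assert tokenTypes["StartTag"] in tagTokenTypes
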
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__init__.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py
deleted file mode 100644
index 5ba926e..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from __future__ import absolute_import, division, unicode_literals
-
-from . import base
-
-from collections import OrderedDict
-
-
-def _attr_key(attr):
-    """Return an appropriate key for an attribute for sorting
-
-    Attributes have a namespace that can be either ``None`` or a string. We
-    can't compare the two because they're different types, so we convert
-    ``None`` to an empty string first.
-
-    """
-    return (attr[0][0] or ''), attr[0][1]
-
-
-class Filter(base.Filter):
-    """Alphabetizes attributes for elements"""
-    def __iter__(self):
-        for token in base.Filter.__iter__(self):
-            if token["type"] in ("StartTag", "EmptyTag"):
-                attrs = OrderedDict()
-                for name, value in sorted(token["data"].items(),
-                                          key=_attr_key):
-                    attrs[name] = value
-                token["data"] = attrs
-            yield token
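
A minimal usage sketch for the attribute-sorting filter above; in practice it is usually enabled through the serializer's alphabetical_attributes option rather than instantiated directly. Assumes the standalone html5lib package; the sample markup is arbitrary.

    import html5lib
    from html5lib.serializer import HTMLSerializer

    dom = html5lib.parse('<p title="t" class="c" id="i">hi</p>')
    walker = html5lib.getTreeWalker("etree")
    # Attributes are emitted in sorted order: class, id, title.
    print(HTMLSerializer(alphabetical_attributes=True).render(walker(dom)))
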
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/base.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/base.py
deleted file mode 100644
index c7dbaed..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/base.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from __future__ import absolute_import, division, unicode_literals
-
-
-class Filter(object):
-    def __init__(self, source):
-        self.source = source
-
-    def __iter__(self):
-        return iter(self.source)
-
-    def __getattr__(self, name):
-        return getattr(self.source, name)
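
Every filter in this package wraps a token stream and re-yields (possibly rewritten) tokens. A small sketch of a custom filter built on the base class above; the UppercaseComments name and the sample markup are made up for illustration.

    import html5lib
    from html5lib.filters import base
    from html5lib.serializer import HTMLSerializer

    class UppercaseComments(base.Filter):
        """Upper-cases the text of every comment token passing through."""
        def __iter__(self):
            for token in base.Filter.__iter__(self):
                if token["type"] == "Comment":
                    token["data"] = token["data"].upper()
                yield token

    dom = html5lib.parse("<p>hi<!-- note --></p>")
    walker = html5lib.getTreeWalker("etree")
    print(HTMLSerializer().render(UppercaseComments(walker(dom))))
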
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py
deleted file mode 100644
index aefb5c8..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py
+++ /dev/null
@@ -1,73 +0,0 @@
-from __future__ import absolute_import, division, unicode_literals
-
-from . import base
-
-
-class Filter(base.Filter):
-    """Injects ```` tag into head of document"""
-    def __init__(self, source, encoding):
-        """Creates a Filter
-
-        :arg source: the source token stream
-
-        :arg encoding: the encoding to set
-
-        """
-        base.Filter.__init__(self, source)
-        self.encoding = encoding
-
-    def __iter__(self):
-        state = "pre_head"
-        meta_found = (self.encoding is None)
-        pending = []
-
-        for token in base.Filter.__iter__(self):
-            type = token["type"]
-            if type == "StartTag":
-                if token["name"].lower() == "head":
-                    state = "in_head"
-
-            elif type == "EmptyTag":
-                if token["name"].lower() == "meta":
-                    # replace charset with actual encoding
-                    has_http_equiv_content_type = False
-                    for (namespace, name), value in token["data"].items():
-                        if namespace is not None:
-                            continue
-                        elif name.lower() == 'charset':
-                            token["data"][(namespace, name)] = self.encoding
-                            meta_found = True
-                            break
-                        elif name == 'http-equiv' and value.lower() == 'content-type':
-                            has_http_equiv_content_type = True
-                    else:
-                        if has_http_equiv_content_type and (None, "content") in token["data"]:
-                            token["data"][(None, "content")] = 'text/html; charset=%s' % self.encoding
-                            meta_found = True
-
-                elif token["name"].lower() == "head" and not meta_found:
-                    # insert meta into empty head
-                    yield {"type": "StartTag", "name": "head",
-                           "data": token["data"]}
-                    yield {"type": "EmptyTag", "name": "meta",
-                           "data": {(None, "charset"): self.encoding}}
-                    yield {"type": "EndTag", "name": "head"}
-                    meta_found = True
-                    continue
-
-            elif type == "EndTag":
-                if token["name"].lower() == "head" and pending:
-                    # insert meta into head (if necessary) and flush pending queue
-                    yield pending.pop(0)
-                    if not meta_found:
-                        yield {"type": "EmptyTag", "name": "meta",
-                               "data": {(None, "charset"): self.encoding}}
-                    while pending:
-                        yield pending.pop(0)
-                    meta_found = True
-                    state = "post_head"
-
-            if state == "in_head":
-                pending.append(token)
-            else:
-                yield token
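
The serializer normally drives this filter when asked to emit an encoding declaration; wiring it up by hand looks roughly like the sketch below (standalone html5lib assumed, sample document made up).

    import html5lib
    from html5lib.filters import inject_meta_charset
    from html5lib.serializer import HTMLSerializer

    dom = html5lib.parse("<html><head></head><body>hi</body></html>")
    walker = html5lib.getTreeWalker("etree")
    stream = inject_meta_charset.Filter(walker(dom), "utf-8")
    # The serialized head now carries a meta charset=utf-8 declaration.
    print(HTMLSerializer(omit_optional_tags=False).render(stream))
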
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/lint.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/lint.py
deleted file mode 100644
index fcc07ee..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/lint.py
+++ /dev/null
@@ -1,93 +0,0 @@
-from __future__ import absolute_import, division, unicode_literals
-
-from pip._vendor.six import text_type
-
-from . import base
-from ..constants import namespaces, voidElements
-
-from ..constants import spaceCharacters
-spaceCharacters = "".join(spaceCharacters)
-
-
-class Filter(base.Filter):
-    """Lints the token stream for errors
-
-    If it finds any errors, it'll raise an ``AssertionError``.
-
-    """
-    def __init__(self, source, require_matching_tags=True):
-        """Creates a Filter
-
-        :arg source: the source token stream
-
-        :arg require_matching_tags: whether or not to require matching tags
-
-        """
-        super(Filter, self).__init__(source)
-        self.require_matching_tags = require_matching_tags
-
-    def __iter__(self):
-        open_elements = []
-        for token in base.Filter.__iter__(self):
-            type = token["type"]
-            if type in ("StartTag", "EmptyTag"):
-                namespace = token["namespace"]
-                name = token["name"]
-                assert namespace is None or isinstance(namespace, text_type)
-                assert namespace != ""
-                assert isinstance(name, text_type)
-                assert name != ""
-                assert isinstance(token["data"], dict)
-                if (not namespace or namespace == namespaces["html"]) and name in voidElements:
-                    assert type == "EmptyTag"
-                else:
-                    assert type == "StartTag"
-                if type == "StartTag" and self.require_matching_tags:
-                    open_elements.append((namespace, name))
-                for (namespace, name), value in token["data"].items():
-                    assert namespace is None or isinstance(namespace, text_type)
-                    assert namespace != ""
-                    assert isinstance(name, text_type)
-                    assert name != ""
-                    assert isinstance(value, text_type)
-
-            elif type == "EndTag":
-                namespace = token["namespace"]
-                name = token["name"]
-                assert namespace is None or isinstance(namespace, text_type)
-                assert namespace != ""
-                assert isinstance(name, text_type)
-                assert name != ""
-                if (not namespace or namespace == namespaces["html"]) and name in voidElements:
-                    assert False, "Void element reported as EndTag token: %(tag)s" % {"tag": name}
-                elif self.require_matching_tags:
-                    start = open_elements.pop()
-                    assert start == (namespace, name)
-
-            elif type == "Comment":
-                data = token["data"]
-                assert isinstance(data, text_type)
-
-            elif type in ("Characters", "SpaceCharacters"):
-                data = token["data"]
-                assert isinstance(data, text_type)
-                assert data != ""
-                if type == "SpaceCharacters":
-                    assert data.strip(spaceCharacters) == ""
-
-            elif type == "Doctype":
-                name = token["name"]
-                assert name is None or isinstance(name, text_type)
-                assert token["publicId"] is None or isinstance(name, text_type)
-                assert token["systemId"] is None or isinstance(name, text_type)
-
-            elif type == "Entity":
-                assert isinstance(token["name"], text_type)
-
-            elif type == "SerializerError":
-                assert isinstance(token["data"], text_type)
-
-            else:
-                assert False, "Unknown token type: %(type)s" % {"type": type}
-
-            yield token
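
A usage sketch: the lint filter is a pass-through that asserts on malformed tokens, so simply consuming the wrapped stream is enough to check it (standalone html5lib assumed, sample markup made up).

    import html5lib
    from html5lib.filters import lint

    dom = html5lib.parse("<p>hello <em>world</em></p>")
    walker = html5lib.getTreeWalker("etree")
    for _ in lint.Filter(walker(dom)):
        pass  # raises AssertionError if the token stream is inconsistent
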
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/optionaltags.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/optionaltags.py
deleted file mode 100644
index 4a86501..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/optionaltags.py
+++ /dev/null
@@ -1,207 +0,0 @@
-from __future__ import absolute_import, division, unicode_literals
-
-from . import base
-
-
-class Filter(base.Filter):
-    """Removes optional tags from the token stream"""
-    def slider(self):
-        previous1 = previous2 = None
-        for token in self.source:
-            if previous1 is not None:
-                yield previous2, previous1, token
-            previous2 = previous1
-            previous1 = token
-        if previous1 is not None:
-            yield previous2, previous1, None
-
-    def __iter__(self):
-        for previous, token, next in self.slider():
-            type = token["type"]
-            if type == "StartTag":
-                if (token["data"] or
-                        not self.is_optional_start(token["name"], previous, next)):
-                    yield token
-            elif type == "EndTag":
-                if not self.is_optional_end(token["name"], next):
-                    yield token
-            else:
-                yield token
-
-    def is_optional_start(self, tagname, previous, next):
-        type = next and next["type"] or None
-        if tagname in 'html':
-            # An html element's start tag may be omitted if the first thing
-            # inside the html element is not a space character or a comment.
-            return type not in ("Comment", "SpaceCharacters")
-        elif tagname == 'head':
-            # A head element's start tag may be omitted if the first thing
-            # inside the head element is an element.
-            # XXX: we also omit the start tag if the head element is empty
-            if type in ("StartTag", "EmptyTag"):
-                return True
-            elif type == "EndTag":
-                return next["name"] == "head"
-        elif tagname == 'body':
-            # A body element's start tag may be omitted if the first thing
-            # inside the body element is not a space character or a comment,
-            # except if the first thing inside the body element is a script
-            # or style element and the node immediately preceding the body
-            # element is a head element whose end tag has been omitted.
-            if type in ("Comment", "SpaceCharacters"):
-                return False
-            elif type == "StartTag":
-                # XXX: we do not look at the preceding event, so we never omit
-                # the body element's start tag if it's followed by a script or
-                # a style element.
-                return next["name"] not in ('script', 'style')
-            else:
-                return True
-        elif tagname == 'colgroup':
-            # A colgroup element's start tag may be omitted if the first thing
-            # inside the colgroup element is a col element, and if the element
-            # is not immediately preceded by another colgroup element whose
-            # end tag has been omitted.
-            if type in ("StartTag", "EmptyTag"):
-                # XXX: we do not look at the preceding event, so instead we never
-                # omit the colgroup element's end tag when it is immediately
-                # followed by another colgroup element. See is_optional_end.
-                return next["name"] == "col"
-            else:
-                return False
-        elif tagname == 'tbody':
-            # A tbody element's start tag may be omitted if the first thing
-            # inside the tbody element is a tr element, and if the element is
-            # not immediately preceded by a tbody, thead, or tfoot element
-            # whose end tag has been omitted.
-            if type == "StartTag":
-                # omit the thead and tfoot elements' end tag when they are
-                # immediately followed by a tbody element. See is_optional_end.
-                if previous and previous['type'] == 'EndTag' and \
-                        previous['name'] in ('tbody', 'thead', 'tfoot'):
-                    return False
-                return next["name"] == 'tr'
-            else:
-                return False
-        return False
-
-    def is_optional_end(self, tagname, next):
-        type = next and next["type"] or None
-        if tagname in ('html', 'head', 'body'):
-            # An html element's end tag may be omitted if the html element
-            # is not immediately followed by a space character or a comment.
-            return type not in ("Comment", "SpaceCharacters")
-        elif tagname in ('li', 'optgroup', 'tr'):
-            # A li element's end tag may be omitted if the li element is
-            # immediately followed by another li element or if there is
-            # no more content in the parent element.
-            # An optgroup element's end tag may be omitted if the optgroup
-            # element is immediately followed by another optgroup element,
-            # or if there is no more content in the parent element.
-            # A tr element's end tag may be omitted if the tr element is
-            # immediately followed by another tr element, or if there is
-            # no more content in the parent element.
-            if type == "StartTag":
-                return next["name"] == tagname
-            else:
-                return type == "EndTag" or type is None
-        elif tagname in ('dt', 'dd'):
-            # A dt element's end tag may be omitted if the dt element is
-            # immediately followed by another dt element or a dd element.
-            # A dd element's end tag may be omitted if the dd element is
-            # immediately followed by another dd element or a dt element,
-            # or if there is no more content in the parent element.
-            if type == "StartTag":
-                return next["name"] in ('dt', 'dd')
-            elif tagname == 'dd':
-                return type == "EndTag" or type is None
-            else:
-                return False
-        elif tagname == 'p':
-            # A p element's end tag may be omitted if the p element is
-            # immediately followed by an address, article, aside,
-            # blockquote, datagrid, dialog, dir, div, dl, fieldset,
-            # footer, form, h1, h2, h3, h4, h5, h6, header, hr, menu,
-            # nav, ol, p, pre, section, table, or ul, element, or if
-            # there is no more content in the parent element.
-            if type in ("StartTag", "EmptyTag"):
-                return next["name"] in ('address', 'article', 'aside',
-                                        'blockquote', 'datagrid', 'dialog',
-                                        'dir', 'div', 'dl', 'fieldset', 'footer',
-                                        'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
-                                        'header', 'hr', 'menu', 'nav', 'ol',
-                                        'p', 'pre', 'section', 'table', 'ul')
-            else:
-                return type == "EndTag" or type is None
-        elif tagname == 'option':
-            # An option element's end tag may be omitted if the option
-            # element is immediately followed by another option element,
-            # or if it is immediately followed by an optgroup
-            # element, or if there is no more content in the parent
-            # element.
-            if type == "StartTag":
-                return next["name"] in ('option', 'optgroup')
-            else:
-                return type == "EndTag" or type is None
-        elif tagname in ('rt', 'rp'):
-            # An rt element's end tag may be omitted if the rt element is
-            # immediately followed by an rt or rp element, or if there is
-            # no more content in the parent element.
-            # An rp element's end tag may be omitted if the rp element is
-            # immediately followed by an rt or rp element, or if there is
-            # no more content in the parent element.
-            if type == "StartTag":
-                return next["name"] in ('rt', 'rp')
-            else:
-                return type == "EndTag" or type is None
-        elif tagname == 'colgroup':
-            # A colgroup element's end tag may be omitted if the colgroup
-            # element is not immediately followed by a space character or
-            # a comment.
-            if type in ("Comment", "SpaceCharacters"):
-                return False
-            elif type == "StartTag":
-                # XXX: we also look for an immediately following colgroup
-                # element. See is_optional_start.
-                return next["name"] != 'colgroup'
-            else:
-                return True
-        elif tagname in ('thead', 'tbody'):
-            # A thead element's end tag may be omitted if the thead element
-            # is immediately followed by a tbody or tfoot element.
-            # A tbody element's end tag may be omitted if the tbody element
-            # is immediately followed by a tbody or tfoot element, or if
-            # there is no more content in the parent element.
-            # A tfoot element's end tag may be omitted if the tfoot element
-            # is immediately followed by a tbody element, or if there is no
-            # more content in the parent element.
-            # XXX: we never omit the end tag when the following element is
-            # a tbody. See is_optional_start.
-            if type == "StartTag":
-                return next["name"] in ['tbody', 'tfoot']
-            elif tagname == 'tbody':
-                return type == "EndTag" or type is None
-            else:
-                return False
-        elif tagname == 'tfoot':
-            # A tfoot element's end tag may be omitted if the tfoot element
-            # is immediately followed by a tbody element, or if there is no
-            # more content in the parent element.
-            # XXX: we never omit the end tag when the following element is
-            # a tbody. See is_optional_start.
-            if type == "StartTag":
-                return next["name"] == 'tbody'
-            else:
-                return type == "EndTag" or type is None
-        elif tagname in ('td', 'th'):
-            # A td element's end tag may be omitted if the td element is
-            # immediately followed by a td or th element, or if there is
-            # no more content in the parent element.
-            # A th element's end tag may be omitted if the th element is
-            # immediately followed by a td or th element, or if there is
-            # no more content in the parent element.
-            if type == "StartTag":
-                return next["name"] in ('td', 'th')
-            else:
-                return type == "EndTag" or type is None
-        return False
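
In practice the filter above is switched on through the serializer's omit_optional_tags option (its default); a short sketch with made-up table markup:

    import html5lib
    from html5lib.serializer import HTMLSerializer

    dom = html5lib.parse("<table><tbody><tr><td>1</td><td>2</td></tr></tbody></table>")
    walker = html5lib.getTreeWalker("etree")
    # Optional tags such as </td>, </tr> and the <tbody> start tag are dropped.
    print(HTMLSerializer(omit_optional_tags=True).render(walker(dom)))
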
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/sanitizer.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/sanitizer.py
deleted file mode 100644
index af8e77b..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/sanitizer.py
+++ /dev/null
@@ -1,896 +0,0 @@
-from __future__ import absolute_import, division, unicode_literals
-
-import re
-from xml.sax.saxutils import escape, unescape
-
-from pip._vendor.six.moves import urllib_parse as urlparse
-
-from . import base
-from ..constants import namespaces, prefixes
-
-__all__ = ["Filter"]
-
-
-allowed_elements = frozenset((
-    (namespaces['html'], 'a'),
-    (namespaces['html'], 'abbr'),
-    (namespaces['html'], 'acronym'),
-    (namespaces['html'], 'address'),
-    (namespaces['html'], 'area'),
-    (namespaces['html'], 'article'),
-    (namespaces['html'], 'aside'),
-    (namespaces['html'], 'audio'),
-    (namespaces['html'], 'b'),
-    (namespaces['html'], 'big'),
-    (namespaces['html'], 'blockquote'),
-    (namespaces['html'], 'br'),
-    (namespaces['html'], 'button'),
-    (namespaces['html'], 'canvas'),
-    (namespaces['html'], 'caption'),
-    (namespaces['html'], 'center'),
-    (namespaces['html'], 'cite'),
-    (namespaces['html'], 'code'),
-    (namespaces['html'], 'col'),
-    (namespaces['html'], 'colgroup'),
-    (namespaces['html'], 'command'),
-    (namespaces['html'], 'datagrid'),
-    (namespaces['html'], 'datalist'),
-    (namespaces['html'], 'dd'),
-    (namespaces['html'], 'del'),
-    (namespaces['html'], 'details'),
-    (namespaces['html'], 'dfn'),
-    (namespaces['html'], 'dialog'),
-    (namespaces['html'], 'dir'),
-    (namespaces['html'], 'div'),
-    (namespaces['html'], 'dl'),
-    (namespaces['html'], 'dt'),
-    (namespaces['html'], 'em'),
-    (namespaces['html'], 'event-source'),
-    (namespaces['html'], 'fieldset'),
-    (namespaces['html'], 'figcaption'),
-    (namespaces['html'], 'figure'),
-    (namespaces['html'], 'footer'),
-    (namespaces['html'], 'font'),
-    (namespaces['html'], 'form'),
-    (namespaces['html'], 'header'),
-    (namespaces['html'], 'h1'),
-    (namespaces['html'], 'h2'),
-    (namespaces['html'], 'h3'),
-    (namespaces['html'], 'h4'),
-    (namespaces['html'], 'h5'),
-    (namespaces['html'], 'h6'),
-    (namespaces['html'], 'hr'),
-    (namespaces['html'], 'i'),
-    (namespaces['html'], 'img'),
-    (namespaces['html'], 'input'),
-    (namespaces['html'], 'ins'),
-    (namespaces['html'], 'keygen'),
-    (namespaces['html'], 'kbd'),
-    (namespaces['html'], 'label'),
-    (namespaces['html'], 'legend'),
-    (namespaces['html'], 'li'),
-    (namespaces['html'], 'm'),
-    (namespaces['html'], 'map'),
-    (namespaces['html'], 'menu'),
-    (namespaces['html'], 'meter'),
-    (namespaces['html'], 'multicol'),
-    (namespaces['html'], 'nav'),
-    (namespaces['html'], 'nextid'),
-    (namespaces['html'], 'ol'),
-    (namespaces['html'], 'output'),
-    (namespaces['html'], 'optgroup'),
-    (namespaces['html'], 'option'),
-    (namespaces['html'], 'p'),
-    (namespaces['html'], 'pre'),
-    (namespaces['html'], 'progress'),
-    (namespaces['html'], 'q'),
-    (namespaces['html'], 's'),
-    (namespaces['html'], 'samp'),
-    (namespaces['html'], 'section'),
-    (namespaces['html'], 'select'),
-    (namespaces['html'], 'small'),
-    (namespaces['html'], 'sound'),
-    (namespaces['html'], 'source'),
-    (namespaces['html'], 'spacer'),
-    (namespaces['html'], 'span'),
-    (namespaces['html'], 'strike'),
-    (namespaces['html'], 'strong'),
-    (namespaces['html'], 'sub'),
-    (namespaces['html'], 'sup'),
-    (namespaces['html'], 'table'),
-    (namespaces['html'], 'tbody'),
-    (namespaces['html'], 'td'),
-    (namespaces['html'], 'textarea'),
-    (namespaces['html'], 'time'),
-    (namespaces['html'], 'tfoot'),
-    (namespaces['html'], 'th'),
-    (namespaces['html'], 'thead'),
-    (namespaces['html'], 'tr'),
-    (namespaces['html'], 'tt'),
-    (namespaces['html'], 'u'),
-    (namespaces['html'], 'ul'),
-    (namespaces['html'], 'var'),
-    (namespaces['html'], 'video'),
-    (namespaces['mathml'], 'maction'),
-    (namespaces['mathml'], 'math'),
-    (namespaces['mathml'], 'merror'),
-    (namespaces['mathml'], 'mfrac'),
-    (namespaces['mathml'], 'mi'),
-    (namespaces['mathml'], 'mmultiscripts'),
-    (namespaces['mathml'], 'mn'),
-    (namespaces['mathml'], 'mo'),
-    (namespaces['mathml'], 'mover'),
-    (namespaces['mathml'], 'mpadded'),
-    (namespaces['mathml'], 'mphantom'),
-    (namespaces['mathml'], 'mprescripts'),
-    (namespaces['mathml'], 'mroot'),
-    (namespaces['mathml'], 'mrow'),
-    (namespaces['mathml'], 'mspace'),
-    (namespaces['mathml'], 'msqrt'),
-    (namespaces['mathml'], 'mstyle'),
-    (namespaces['mathml'], 'msub'),
-    (namespaces['mathml'], 'msubsup'),
-    (namespaces['mathml'], 'msup'),
-    (namespaces['mathml'], 'mtable'),
-    (namespaces['mathml'], 'mtd'),
-    (namespaces['mathml'], 'mtext'),
-    (namespaces['mathml'], 'mtr'),
-    (namespaces['mathml'], 'munder'),
-    (namespaces['mathml'], 'munderover'),
-    (namespaces['mathml'], 'none'),
-    (namespaces['svg'], 'a'),
-    (namespaces['svg'], 'animate'),
-    (namespaces['svg'], 'animateColor'),
-    (namespaces['svg'], 'animateMotion'),
-    (namespaces['svg'], 'animateTransform'),
-    (namespaces['svg'], 'clipPath'),
-    (namespaces['svg'], 'circle'),
-    (namespaces['svg'], 'defs'),
-    (namespaces['svg'], 'desc'),
-    (namespaces['svg'], 'ellipse'),
-    (namespaces['svg'], 'font-face'),
-    (namespaces['svg'], 'font-face-name'),
-    (namespaces['svg'], 'font-face-src'),
-    (namespaces['svg'], 'g'),
-    (namespaces['svg'], 'glyph'),
-    (namespaces['svg'], 'hkern'),
-    (namespaces['svg'], 'linearGradient'),
-    (namespaces['svg'], 'line'),
-    (namespaces['svg'], 'marker'),
-    (namespaces['svg'], 'metadata'),
-    (namespaces['svg'], 'missing-glyph'),
-    (namespaces['svg'], 'mpath'),
-    (namespaces['svg'], 'path'),
-    (namespaces['svg'], 'polygon'),
-    (namespaces['svg'], 'polyline'),
-    (namespaces['svg'], 'radialGradient'),
-    (namespaces['svg'], 'rect'),
-    (namespaces['svg'], 'set'),
-    (namespaces['svg'], 'stop'),
-    (namespaces['svg'], 'svg'),
-    (namespaces['svg'], 'switch'),
-    (namespaces['svg'], 'text'),
-    (namespaces['svg'], 'title'),
-    (namespaces['svg'], 'tspan'),
-    (namespaces['svg'], 'use'),
-))
-
-allowed_attributes = frozenset((
-    # HTML attributes
-    (None, 'abbr'),
-    (None, 'accept'),
-    (None, 'accept-charset'),
-    (None, 'accesskey'),
-    (None, 'action'),
-    (None, 'align'),
-    (None, 'alt'),
-    (None, 'autocomplete'),
-    (None, 'autofocus'),
-    (None, 'axis'),
-    (None, 'background'),
-    (None, 'balance'),
-    (None, 'bgcolor'),
-    (None, 'bgproperties'),
-    (None, 'border'),
-    (None, 'bordercolor'),
-    (None, 'bordercolordark'),
-    (None, 'bordercolorlight'),
-    (None, 'bottompadding'),
-    (None, 'cellpadding'),
-    (None, 'cellspacing'),
-    (None, 'ch'),
-    (None, 'challenge'),
-    (None, 'char'),
-    (None, 'charoff'),
-    (None, 'choff'),
-    (None, 'charset'),
-    (None, 'checked'),
-    (None, 'cite'),
-    (None, 'class'),
-    (None, 'clear'),
-    (None, 'color'),
-    (None, 'cols'),
-    (None, 'colspan'),
-    (None, 'compact'),
-    (None, 'contenteditable'),
-    (None, 'controls'),
-    (None, 'coords'),
-    (None, 'data'),
-    (None, 'datafld'),
-    (None, 'datapagesize'),
-    (None, 'datasrc'),
-    (None, 'datetime'),
-    (None, 'default'),
-    (None, 'delay'),
-    (None, 'dir'),
-    (None, 'disabled'),
-    (None, 'draggable'),
-    (None, 'dynsrc'),
-    (None, 'enctype'),
-    (None, 'end'),
-    (None, 'face'),
-    (None, 'for'),
-    (None, 'form'),
-    (None, 'frame'),
-    (None, 'galleryimg'),
-    (None, 'gutter'),
-    (None, 'headers'),
-    (None, 'height'),
-    (None, 'hidefocus'),
-    (None, 'hidden'),
-    (None, 'high'),
-    (None, 'href'),
-    (None, 'hreflang'),
-    (None, 'hspace'),
-    (None, 'icon'),
-    (None, 'id'),
-    (None, 'inputmode'),
-    (None, 'ismap'),
-    (None, 'keytype'),
-    (None, 'label'),
-    (None, 'leftspacing'),
-    (None, 'lang'),
-    (None, 'list'),
-    (None, 'longdesc'),
-    (None, 'loop'),
-    (None, 'loopcount'),
-    (None, 'loopend'),
-    (None, 'loopstart'),
-    (None, 'low'),
-    (None, 'lowsrc'),
-    (None, 'max'),
-    (None, 'maxlength'),
-    (None, 'media'),
-    (None, 'method'),
-    (None, 'min'),
-    (None, 'multiple'),
-    (None, 'name'),
-    (None, 'nohref'),
-    (None, 'noshade'),
-    (None, 'nowrap'),
-    (None, 'open'),
-    (None, 'optimum'),
-    (None, 'pattern'),
-    (None, 'ping'),
-    (None, 'point-size'),
-    (None, 'poster'),
-    (None, 'pqg'),
-    (None, 'preload'),
-    (None, 'prompt'),
-    (None, 'radiogroup'),
-    (None, 'readonly'),
-    (None, 'rel'),
-    (None, 'repeat-max'),
-    (None, 'repeat-min'),
-    (None, 'replace'),
-    (None, 'required'),
-    (None, 'rev'),
-    (None, 'rightspacing'),
-    (None, 'rows'),
-    (None, 'rowspan'),
-    (None, 'rules'),
-    (None, 'scope'),
-    (None, 'selected'),
-    (None, 'shape'),
-    (None, 'size'),
-    (None, 'span'),
-    (None, 'src'),
-    (None, 'start'),
-    (None, 'step'),
-    (None, 'style'),
-    (None, 'summary'),
-    (None, 'suppress'),
-    (None, 'tabindex'),
-    (None, 'target'),
-    (None, 'template'),
-    (None, 'title'),
-    (None, 'toppadding'),
-    (None, 'type'),
-    (None, 'unselectable'),
-    (None, 'usemap'),
-    (None, 'urn'),
-    (None, 'valign'),
-    (None, 'value'),
-    (None, 'variable'),
-    (None, 'volume'),
-    (None, 'vspace'),
-    (None, 'vrml'),
-    (None, 'width'),
-    (None, 'wrap'),
-    (namespaces['xml'], 'lang'),
-    # MathML attributes
-    (None, 'actiontype'),
-    (None, 'align'),
-    (None, 'columnalign'),
-    (None, 'columnalign'),
-    (None, 'columnalign'),
-    (None, 'columnlines'),
-    (None, 'columnspacing'),
-    (None, 'columnspan'),
-    (None, 'depth'),
-    (None, 'display'),
-    (None, 'displaystyle'),
-    (None, 'equalcolumns'),
-    (None, 'equalrows'),
-    (None, 'fence'),
-    (None, 'fontstyle'),
-    (None, 'fontweight'),
-    (None, 'frame'),
-    (None, 'height'),
-    (None, 'linethickness'),
-    (None, 'lspace'),
-    (None, 'mathbackground'),
-    (None, 'mathcolor'),
-    (None, 'mathvariant'),
-    (None, 'mathvariant'),
-    (None, 'maxsize'),
-    (None, 'minsize'),
-    (None, 'other'),
-    (None, 'rowalign'),
-    (None, 'rowalign'),
-    (None, 'rowalign'),
-    (None, 'rowlines'),
-    (None, 'rowspacing'),
-    (None, 'rowspan'),
-    (None, 'rspace'),
-    (None, 'scriptlevel'),
-    (None, 'selection'),
-    (None, 'separator'),
-    (None, 'stretchy'),
-    (None, 'width'),
-    (None, 'width'),
-    (namespaces['xlink'], 'href'),
-    (namespaces['xlink'], 'show'),
-    (namespaces['xlink'], 'type'),
-    # SVG attributes
-    (None, 'accent-height'),
-    (None, 'accumulate'),
-    (None, 'additive'),
-    (None, 'alphabetic'),
-    (None, 'arabic-form'),
-    (None, 'ascent'),
-    (None, 'attributeName'),
-    (None, 'attributeType'),
-    (None, 'baseProfile'),
-    (None, 'bbox'),
-    (None, 'begin'),
-    (None, 'by'),
-    (None, 'calcMode'),
-    (None, 'cap-height'),
-    (None, 'class'),
-    (None, 'clip-path'),
-    (None, 'color'),
-    (None, 'color-rendering'),
-    (None, 'content'),
-    (None, 'cx'),
-    (None, 'cy'),
-    (None, 'd'),
-    (None, 'dx'),
-    (None, 'dy'),
-    (None, 'descent'),
-    (None, 'display'),
-    (None, 'dur'),
-    (None, 'end'),
-    (None, 'fill'),
-    (None, 'fill-opacity'),
-    (None, 'fill-rule'),
-    (None, 'font-family'),
-    (None, 'font-size'),
-    (None, 'font-stretch'),
-    (None, 'font-style'),
-    (None, 'font-variant'),
-    (None, 'font-weight'),
-    (None, 'from'),
-    (None, 'fx'),
-    (None, 'fy'),
-    (None, 'g1'),
-    (None, 'g2'),
-    (None, 'glyph-name'),
-    (None, 'gradientUnits'),
-    (None, 'hanging'),
-    (None, 'height'),
-    (None, 'horiz-adv-x'),
-    (None, 'horiz-origin-x'),
-    (None, 'id'),
-    (None, 'ideographic'),
-    (None, 'k'),
-    (None, 'keyPoints'),
-    (None, 'keySplines'),
-    (None, 'keyTimes'),
-    (None, 'lang'),
-    (None, 'marker-end'),
-    (None, 'marker-mid'),
-    (None, 'marker-start'),
-    (None, 'markerHeight'),
-    (None, 'markerUnits'),
-    (None, 'markerWidth'),
-    (None, 'mathematical'),
-    (None, 'max'),
-    (None, 'min'),
-    (None, 'name'),
-    (None, 'offset'),
-    (None, 'opacity'),
-    (None, 'orient'),
-    (None, 'origin'),
-    (None, 'overline-position'),
-    (None, 'overline-thickness'),
-    (None, 'panose-1'),
-    (None, 'path'),
-    (None, 'pathLength'),
-    (None, 'points'),
-    (None, 'preserveAspectRatio'),
-    (None, 'r'),
-    (None, 'refX'),
-    (None, 'refY'),
-    (None, 'repeatCount'),
-    (None, 'repeatDur'),
-    (None, 'requiredExtensions'),
-    (None, 'requiredFeatures'),
-    (None, 'restart'),
-    (None, 'rotate'),
-    (None, 'rx'),
-    (None, 'ry'),
-    (None, 'slope'),
-    (None, 'stemh'),
-    (None, 'stemv'),
-    (None, 'stop-color'),
-    (None, 'stop-opacity'),
-    (None, 'strikethrough-position'),
-    (None, 'strikethrough-thickness'),
-    (None, 'stroke'),
-    (None, 'stroke-dasharray'),
-    (None, 'stroke-dashoffset'),
-    (None, 'stroke-linecap'),
-    (None, 'stroke-linejoin'),
-    (None, 'stroke-miterlimit'),
-    (None, 'stroke-opacity'),
-    (None, 'stroke-width'),
-    (None, 'systemLanguage'),
-    (None, 'target'),
-    (None, 'text-anchor'),
-    (None, 'to'),
-    (None, 'transform'),
-    (None, 'type'),
-    (None, 'u1'),
-    (None, 'u2'),
-    (None, 'underline-position'),
-    (None, 'underline-thickness'),
-    (None, 'unicode'),
-    (None, 'unicode-range'),
-    (None, 'units-per-em'),
-    (None, 'values'),
-    (None, 'version'),
-    (None, 'viewBox'),
-    (None, 'visibility'),
-    (None, 'width'),
-    (None, 'widths'),
-    (None, 'x'),
-    (None, 'x-height'),
-    (None, 'x1'),
-    (None, 'x2'),
-    (namespaces['xlink'], 'actuate'),
-    (namespaces['xlink'], 'arcrole'),
-    (namespaces['xlink'], 'href'),
-    (namespaces['xlink'], 'role'),
-    (namespaces['xlink'], 'show'),
-    (namespaces['xlink'], 'title'),
-    (namespaces['xlink'], 'type'),
-    (namespaces['xml'], 'base'),
-    (namespaces['xml'], 'lang'),
-    (namespaces['xml'], 'space'),
-    (None, 'y'),
-    (None, 'y1'),
-    (None, 'y2'),
-    (None, 'zoomAndPan'),
-))
-
-attr_val_is_uri = frozenset((
-    (None, 'href'),
-    (None, 'src'),
-    (None, 'cite'),
-    (None, 'action'),
-    (None, 'longdesc'),
-    (None, 'poster'),
-    (None, 'background'),
-    (None, 'datasrc'),
-    (None, 'dynsrc'),
-    (None, 'lowsrc'),
-    (None, 'ping'),
-    (namespaces['xlink'], 'href'),
-    (namespaces['xml'], 'base'),
-))
-
-svg_attr_val_allows_ref = frozenset((
-    (None, 'clip-path'),
-    (None, 'color-profile'),
-    (None, 'cursor'),
-    (None, 'fill'),
-    (None, 'filter'),
-    (None, 'marker'),
-    (None, 'marker-start'),
-    (None, 'marker-mid'),
-    (None, 'marker-end'),
-    (None, 'mask'),
-    (None, 'stroke'),
-))
-
-svg_allow_local_href = frozenset((
-    (None, 'altGlyph'),
-    (None, 'animate'),
-    (None, 'animateColor'),
-    (None, 'animateMotion'),
-    (None, 'animateTransform'),
-    (None, 'cursor'),
-    (None, 'feImage'),
-    (None, 'filter'),
-    (None, 'linearGradient'),
-    (None, 'pattern'),
-    (None, 'radialGradient'),
-    (None, 'textpath'),
-    (None, 'tref'),
-    (None, 'set'),
-    (None, 'use')
-))
-
-allowed_css_properties = frozenset((
-    'azimuth',
-    'background-color',
-    'border-bottom-color',
-    'border-collapse',
-    'border-color',
-    'border-left-color',
-    'border-right-color',
-    'border-top-color',
-    'clear',
-    'color',
-    'cursor',
-    'direction',
-    'display',
-    'elevation',
-    'float',
-    'font',
-    'font-family',
-    'font-size',
-    'font-style',
-    'font-variant',
-    'font-weight',
-    'height',
-    'letter-spacing',
-    'line-height',
-    'overflow',
-    'pause',
-    'pause-after',
-    'pause-before',
-    'pitch',
-    'pitch-range',
-    'richness',
-    'speak',
-    'speak-header',
-    'speak-numeral',
-    'speak-punctuation',
-    'speech-rate',
-    'stress',
-    'text-align',
-    'text-decoration',
-    'text-indent',
-    'unicode-bidi',
-    'vertical-align',
-    'voice-family',
-    'volume',
-    'white-space',
-    'width',
-))
-
-allowed_css_keywords = frozenset((
-    'auto',
-    'aqua',
-    'black',
-    'block',
-    'blue',
-    'bold',
-    'both',
-    'bottom',
-    'brown',
-    'center',
-    'collapse',
-    'dashed',
-    'dotted',
-    'fuchsia',
-    'gray',
-    'green',
-    '!important',
-    'italic',
-    'left',
-    'lime',
-    'maroon',
-    'medium',
-    'none',
-    'navy',
-    'normal',
-    'nowrap',
-    'olive',
-    'pointer',
-    'purple',
-    'red',
-    'right',
-    'solid',
-    'silver',
-    'teal',
-    'top',
-    'transparent',
-    'underline',
-    'white',
-    'yellow',
-))
-
-allowed_svg_properties = frozenset((
-    'fill',
-    'fill-opacity',
-    'fill-rule',
-    'stroke',
-    'stroke-width',
-    'stroke-linecap',
-    'stroke-linejoin',
-    'stroke-opacity',
-))
-
-allowed_protocols = frozenset((
-    'ed2k',
-    'ftp',
-    'http',
-    'https',
-    'irc',
-    'mailto',
-    'news',
-    'gopher',
-    'nntp',
-    'telnet',
-    'webcal',
-    'xmpp',
-    'callto',
-    'feed',
-    'urn',
-    'aim',
-    'rsync',
-    'tag',
-    'ssh',
-    'sftp',
-    'rtsp',
-    'afs',
-    'data',
-))
-
-allowed_content_types = frozenset((
-    'image/png',
-    'image/jpeg',
-    'image/gif',
-    'image/webp',
-    'image/bmp',
-    'text/plain',
-))
-
-
-data_content_type = re.compile(r'''
-                                ^
-                                # Match a content type /
-                                (?P<content_type>[-a-zA-Z0-9.]+/[-a-zA-Z0-9.]+)
-                                # Match any character set and encoding
-                                (?:(?:;charset=(?:[-a-zA-Z0-9]+)(?:;(?:base64))?)
-                                  |(?:;(?:base64))?(?:;charset=(?:[-a-zA-Z0-9]+))?)
-                                # Assume the rest is data
-                                ,.*
-                                $
-                                ''',
-                               re.VERBOSE)
-
-
-class Filter(base.Filter):
-    """Sanitizes token stream of XHTML+MathML+SVG and of inline style attributes"""
-    def __init__(self,
-                 source,
-                 allowed_elements=allowed_elements,
-                 allowed_attributes=allowed_attributes,
-                 allowed_css_properties=allowed_css_properties,
-                 allowed_css_keywords=allowed_css_keywords,
-                 allowed_svg_properties=allowed_svg_properties,
-                 allowed_protocols=allowed_protocols,
-                 allowed_content_types=allowed_content_types,
-                 attr_val_is_uri=attr_val_is_uri,
-                 svg_attr_val_allows_ref=svg_attr_val_allows_ref,
-                 svg_allow_local_href=svg_allow_local_href):
-        """Creates a Filter
-
-        :arg allowed_elements: set of elements to allow--everything else will
-            be escaped
-
-        :arg allowed_attributes: set of attributes to allow in
-            elements--everything else will be stripped
-
-        :arg allowed_css_properties: set of CSS properties to allow--everything
-            else will be stripped
-
-        :arg allowed_css_keywords: set of CSS keywords to allow--everything
-            else will be stripped
-
-        :arg allowed_svg_properties: set of SVG properties to allow--everything
-            else will be removed
-
-        :arg allowed_protocols: set of allowed protocols for URIs
-
-        :arg allowed_content_types: set of allowed content types for ``data`` URIs.
-
-        :arg attr_val_is_uri: set of attributes that have URI values--values
-            that have a scheme not listed in ``allowed_protocols`` are removed
-
-        :arg svg_attr_val_allows_ref: set of SVG attributes that can have
-            references
-
-        :arg svg_allow_local_href: set of SVG elements that can have local
-            hrefs--these are removed
-
-        """
-        super(Filter, self).__init__(source)
-        self.allowed_elements = allowed_elements
-        self.allowed_attributes = allowed_attributes
-        self.allowed_css_properties = allowed_css_properties
-        self.allowed_css_keywords = allowed_css_keywords
-        self.allowed_svg_properties = allowed_svg_properties
-        self.allowed_protocols = allowed_protocols
-        self.allowed_content_types = allowed_content_types
-        self.attr_val_is_uri = attr_val_is_uri
-        self.svg_attr_val_allows_ref = svg_attr_val_allows_ref
-        self.svg_allow_local_href = svg_allow_local_href
-
-    def __iter__(self):
-        for token in base.Filter.__iter__(self):
-            token = self.sanitize_token(token)
-            if token:
-                yield token
-
-    # Sanitize the +html+, escaping all elements not in ALLOWED_ELEMENTS, and
-    # stripping out all attributes not in ALLOWED_ATTRIBUTES. Style attributes
-    # are parsed, and a restricted set, specified by ALLOWED_CSS_PROPERTIES and
-    # ALLOWED_CSS_KEYWORDS, are allowed through. attributes in ATTR_VAL_IS_URI
-    # are scanned, and only URI schemes specified in ALLOWED_PROTOCOLS are
-    # allowed.
-    #
-    #   sanitize_html('<script> do_nasty_stuff() </script>')
-    #    => &lt;script> do_nasty_stuff() &lt;/script>
-    #   sanitize_html('<a href="javascript: sucker();">Click here for $100</a>')
-    #    => <a>Click here for $100</a>
-    def sanitize_token(self, token):
-
-        # accommodate filters which use token_type differently
-        token_type = token["type"]
-        if token_type in ("StartTag", "EndTag", "EmptyTag"):
-            name = token["name"]
-            namespace = token["namespace"]
-            if ((namespace, name) in self.allowed_elements or
-                (namespace is None and
-                 (namespaces["html"], name) in self.allowed_elements)):
-                return self.allowed_token(token)
-            else:
-                return self.disallowed_token(token)
-        elif token_type == "Comment":
-            pass
-        else:
-            return token
-
-    def allowed_token(self, token):
-        if "data" in token:
-            attrs = token["data"]
-            attr_names = set(attrs.keys())
-
-            # Remove forbidden attributes
-            for to_remove in (attr_names - self.allowed_attributes):
-                del token["data"][to_remove]
-                attr_names.remove(to_remove)
-
-            # Remove attributes with disallowed URL values
-            for attr in (attr_names & self.attr_val_is_uri):
-                assert attr in attrs
-                # I don't have a clue where this regexp comes from or why it matches those
-                # characters, nor why we call unescape. I just know it's always been here.
-                # Should you be worried by this comment in a sanitizer? Yes. On the other hand, all
-                # this will do is remove *more* than it otherwise would.
-                val_unescaped = re.sub("[`\x00-\x20\x7f-\xa0\\s]+", '',
-                                       unescape(attrs[attr])).lower()
-                # remove replacement characters from unescaped characters
-                val_unescaped = val_unescaped.replace("\ufffd", "")
-                try:
-                    uri = urlparse.urlparse(val_unescaped)
-                except ValueError:
-                    uri = None
-                    del attrs[attr]
-                if uri and uri.scheme:
-                    if uri.scheme not in self.allowed_protocols:
-                        del attrs[attr]
-                    if uri.scheme == 'data':
-                        m = data_content_type.match(uri.path)
-                        if not m:
-                            del attrs[attr]
-                        elif m.group('content_type') not in self.allowed_content_types:
-                            del attrs[attr]
-
-            for attr in self.svg_attr_val_allows_ref:
-                if attr in attrs:
-                    attrs[attr] = re.sub(r'url\s*\(\s*[^#\s][^)]+?\)',
-                                         ' ',
-                                         unescape(attrs[attr]))
-            if (token["name"] in self.svg_allow_local_href and
-                (namespaces['xlink'], 'href') in attrs and re.search(r'^\s*[^#\s].*',
-                                                                     attrs[(namespaces['xlink'], 'href')])):
-                del attrs[(namespaces['xlink'], 'href')]
-            if (None, 'style') in attrs:
-                attrs[(None, 'style')] = self.sanitize_css(attrs[(None, 'style')])
-            token["data"] = attrs
-        return token
-
-    def disallowed_token(self, token):
-        token_type = token["type"]
-        if token_type == "EndTag":
-            token["data"] = "" % token["name"]
-        elif token["data"]:
-            assert token_type in ("StartTag", "EmptyTag")
-            attrs = []
-            for (ns, name), v in token["data"].items():
-                attrs.append(' %s="%s"' % (name if ns is None else "%s:%s" % (prefixes[ns], name), escape(v)))
-            token["data"] = "<%s%s>" % (token["name"], ''.join(attrs))
-        else:
-            token["data"] = "<%s>" % token["name"]
-        if token.get("selfClosing"):
-            token["data"] = token["data"][:-1] + "/>"
-
-        token["type"] = "Characters"
-
-        del token["name"]
-        return token
-
-    def sanitize_css(self, style):
-        # disallow urls
-        style = re.compile(r'url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ', style)
-
-        # gauntlet
-        if not re.match(r"""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style):
-            return ''
-        if not re.match(r"^\s*([-\w]+\s*:[^:;]*(;\s*|$))*$", style):
-            return ''
-
-        clean = []
-        for prop, value in re.findall(r"([-\w]+)\s*:\s*([^:;]*)", style):
-            if not value:
-                continue
-            if prop.lower() in self.allowed_css_properties:
-                clean.append(prop + ': ' + value + ';')
-            elif prop.split('-')[0].lower() in ['background', 'border', 'margin',
-                                                'padding']:
-                for keyword in value.split():
-                    if keyword not in self.allowed_css_keywords and \
-                            not re.match(r"^(#[0-9a-fA-F]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$", keyword):  # noqa
-                        break
-                else:
-                    clean.append(prop + ': ' + value + ';')
-            elif prop.lower() in self.allowed_svg_properties:
-                clean.append(prop + ': ' + value + ';')
-
-        return ' '.join(clean)
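
A usage sketch for the sanitizer above: attributes not in the allow-list are stripped and disallowed elements are re-emitted as escaped character data. Assumes the standalone html5lib package (dedicated sanitizers such as Bleach are often preferred in production); the markup below is made up.

    import html5lib
    from html5lib.filters import sanitizer
    from html5lib.serializer import HTMLSerializer

    dirty = '<p onclick="evil()">ok <script>alert(1)</script></p>'
    dom = html5lib.parseFragment(dirty)
    walker = html5lib.getTreeWalker("etree")
    # onclick is not in allowed_attributes and is dropped; script is not an
    # allowed element, so its tags come out as escaped text.
    print(HTMLSerializer().render(sanitizer.Filter(walker(dom))))
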
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/whitespace.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/whitespace.py
deleted file mode 100644
index 0d12584..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/filters/whitespace.py
+++ /dev/null
@@ -1,38 +0,0 @@
-from __future__ import absolute_import, division, unicode_literals
-
-import re
-
-from . import base
-from ..constants import rcdataElements, spaceCharacters
-spaceCharacters = "".join(spaceCharacters)
-
-SPACES_REGEX = re.compile("[%s]+" % spaceCharacters)
-
-
-class Filter(base.Filter):
-    """Collapses whitespace except in pre, textarea, and script elements"""
-    spacePreserveElements = frozenset(["pre", "textarea"] + list(rcdataElements))
-
-    def __iter__(self):
-        preserve = 0
-        for token in base.Filter.__iter__(self):
-            type = token["type"]
-            if type == "StartTag" \
-                    and (preserve or token["name"] in self.spacePreserveElements):
-                preserve += 1
-
-            elif type == "EndTag" and preserve:
-                preserve -= 1
-
-            elif not preserve and type == "SpaceCharacters" and token["data"]:
-                # Test on token["data"] above to not introduce spaces where there were not
-                token["data"] = " "
-
-            elif not preserve and type == "Characters":
-                token["data"] = collapse_spaces(token["data"])
-
-            yield token
-
-
-def collapse_spaces(text):
-    return SPACES_REGEX.sub(' ', text)
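
A usage sketch matching the serializer's strip_whitespace option: runs of whitespace collapse to a single space except inside pre, textarea and the rcdata elements (standalone html5lib assumed, sample markup made up).

    import html5lib
    from html5lib.filters import whitespace
    from html5lib.serializer import HTMLSerializer

    dom = html5lib.parse("<p>a   b\n c</p><pre>  keep   this  </pre>")
    walker = html5lib.getTreeWalker("etree")
    # The paragraph text collapses to "a b c"; the pre content is untouched.
    print(HTMLSerializer().render(whitespace.Filter(walker(dom))))
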
diff --git a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/html5parser.py b/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/html5parser.py
deleted file mode 100644
index ae41a13..0000000
--- a/.tox/py37-normal/lib/python3.7/site-packages/pip/_vendor/html5lib/html5parser.py
+++ /dev/null
@@ -1,2791 +0,0 @@
-from __future__ import absolute_import, division, unicode_literals
-from pip._vendor.six import with_metaclass, viewkeys
-
-import types
-from collections import OrderedDict
-
-from . import _inputstream
-from . import _tokenizer
-
-from . import treebuilders
-from .treebuilders.base import Marker
-
-from . import _utils
-from .constants import (
-    spaceCharacters, asciiUpper2Lower,
-    specialElements, headingElements, cdataElements, rcdataElements,
-    tokenTypes, tagTokenTypes,
-    namespaces,
-    htmlIntegrationPointElements, mathmlTextIntegrationPointElements,
-    adjustForeignAttributes as adjustForeignAttributesMap,
-    adjustMathMLAttributes, adjustSVGAttributes,
-    E,
-    _ReparseException
-)
-
-
-def parse(doc, treebuilder="etree", namespaceHTMLElements=True, **kwargs):
-    """Parse an HTML document as a string or file-like object into a tree
-
-    :arg doc: the document to parse as a string or file-like object
-
-    :arg treebuilder: the treebuilder to use when parsing
-
-    :arg namespaceHTMLElements: whether or not to namespace HTML elements
-
-    :returns: parsed tree
-
-    Example:
-
-    >>> from html5lib.html5parser import parse
-    >>> parse('<html><body><p>This is a doc</p></body></html>')
-    <Element u'{http://www.w3.org/1999/xhtml}html' at 0x7feac4909db0>
-
-    """
-    tb = treebuilders.getTreeBuilder(treebuilder)
-    p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements)
-    return p.parse(doc, **kwargs)
-
-
-def parseFragment(doc, container="div", treebuilder="etree", namespaceHTMLElements=True, **kwargs):
-    """Parse an HTML fragment as a string or file-like object into a tree
-
-    :arg doc: the fragment to parse as a string or file-like object
-
-    :arg container: the container context to parse the fragment in
-
-    :arg treebuilder: the treebuilder to use when parsing
-
-    :arg namespaceHTMLElements: whether or not to namespace HTML elements
-
-    :returns: parsed tree
-
-    Example:
-
-    >>> from html5lib.html5libparser import parseFragment
-    >>> parseFragment('<b>this is a fragment</b>')
-    <Element u'DOCUMENT_FRAGMENT' at 0x7feac484b550>
-
-    """
-    tb = treebuilders.getTreeBuilder(treebuilder)
-    p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements)
-    return p.parseFragment(doc, container=container, **kwargs)
-
-
-def method_decorator_metaclass(function):
-    class Decorated(type):
-        def __new__(meta, classname, bases, classDict):
-            for attributeName, attribute in classDict.items():
-                if isinstance(attribute, types.FunctionType):
-                    attribute = function(attribute)
-
-                classDict[attributeName] = attribute
-            return type.__new__(meta, classname, bases, classDict)
-    return Decorated
-
-
-class HTMLParser(object):
-    """HTML parser
-
-    Generates a tree structure from a stream of (possibly malformed) HTML.
-
-    """
-
-    def __init__(self, tree=None, strict=False, namespaceHTMLElements=True, debug=False):
-        """
-        :arg tree: a treebuilder class controlling the type of tree that will be
-            returned. Built in treebuilders can be accessed through
-            html5lib.treebuilders.getTreeBuilder(treeType)
-
-        :arg strict: raise an exception when a parse error is encountered
-
-        :arg namespaceHTMLElements: whether or not to namespace HTML elements
-
-        :arg debug: whether or not to enable debug mode which logs things
-
-        Example:
-
-        >>> from html5lib.html5parser import HTMLParser
-        >>> parser = HTMLParser() # generates parser with etree builder
-        >>> parser = HTMLParser('lxml', strict=True) # generates parser with lxml builder which is strict
-
-        """
-
-        # Raise an exception on the first error encountered
-        self.strict = strict
-
-        if tree is None:
-            tree = treebuilders.getTreeBuilder("etree")
-        self.tree = tree(namespaceHTMLElements)
-        self.errors = []
-
-        self.phases = dict([(name, cls(self, self.tree)) for name, cls in
-                            getPhases(debug).items()])
-
-    def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs):
-
-        self.innerHTMLMode = innerHTML
-        self.container = container
-        self.scripting = scripting
-        self.tokenizer = _tokenizer.HTMLTokenizer(stream, parser=self, **kwargs)
-        self.reset()
-
-        try:
-            self.mainLoop()
-        except _ReparseException:
-            self.reset()
-            self.mainLoop()
-
-    def reset(self):
-        self.tree.reset()
-        self.firstStartTag = False
-        self.errors = []
-        self.log = [] # only used with debug mode
-        # "quirks" / "limited quirks" / "no quirks"
-        self.compatMode = "no quirks"
-
-        if self.innerHTMLMode:
-            self.innerHTML = self.container.lower()
-
-            if self.innerHTML in cdataElements:
-                self.tokenizer.state = self.tokenizer.rcdataState
-            elif self.innerHTML in rcdataElements:
-                self.tokenizer.state = self.tokenizer.rawtextState
-            elif self.innerHTML == 'plaintext':
-                self.tokenizer.state = self.tokenizer.plaintextState
-            else:
-                # state already is data state
-                # self.tokenizer.state = self.tokenizer.dataState
-                pass
-            self.phase = self.phases["beforeHtml"]
-            self.phase.insertHtmlElement()
-            self.resetInsertionMode()
-        else:
-            self.innerHTML = False # pylint:disable=redefined-variable-type
-            self.phase = self.phases["initial"]
-
-        self.lastPhase = None
-
-        self.beforeRCDataPhase = None
-
-        self.framesetOK = True
-
-    @property
-    def documentEncoding(self):
-        """Name of the character encoding that was used to decode the input stream, or
-        :obj:`None` if that is not determined yet
-
-        """
-        if not hasattr(self, 'tokenizer'):
-            return None
-        return self.tokenizer.stream.charEncoding[0].name
-
-    def isHTMLIntegrationPoint(self, element):
-        if (element.name == "annotation-xml" and
-                element.namespace == namespaces["mathml"]):
-            return ("encoding" in element.attributes and
-                    element.attributes["encoding"].translate(
-                        asciiUpper2Lower) in
-                    ("text/html", "application/xhtml+xml"))
-        else:
-            return (element.namespace, element.name) in htmlIntegrationPointElements
-
-    def isMathMLTextIntegrationPoint(self, element):
-        return (element.namespace, element.name) in mathmlTextIntegrationPointElements
-
-    def mainLoop(self):
-        CharactersToken = tokenTypes["Characters"]
-        SpaceCharactersToken = tokenTypes["SpaceCharacters"]
-        StartTagToken = tokenTypes["StartTag"]
-        EndTagToken = tokenTypes["EndTag"]
-        CommentToken = tokenTypes["Comment"]
-        DoctypeToken = tokenTypes["Doctype"]
-        ParseErrorToken = tokenTypes["ParseError"]
-
-        for token in self.normalizedTokens():
-            prev_token = None
-            new_token = token
-            while new_token is not None:
-                prev_token = new_token
-                currentNode = self.tree.openElements[-1] if self.tree.openElements else None
-                currentNodeNamespace = currentNode.namespace if currentNode else None
-                currentNodeName = currentNode.name if currentNode else None
-
-                type = new_token["type"]
-
-                if type == ParseErrorToken:
-                    self.parseError(new_token["data"], new_token.get("datavars", {}))
-                    new_token = None
-                else:
-                    if (len(self.tree.openElements) == 0 or
-                        currentNodeNamespace == self.tree.defaultNamespace or
-                        (self.isMathMLTextIntegrationPoint(currentNode) and
-                         ((type == StartTagToken and
-                           token["name"] not in frozenset(["mglyph", "malignmark"])) or
-                          type in (CharactersToken, SpaceCharactersToken))) or
-                        (currentNodeNamespace == namespaces["mathml"] and
-                         currentNodeName == "annotation-xml" and
-                         type == StartTagToken and
-                         token["name"] == "svg") or
-                        (self.isHTMLIntegrationPoint(currentNode) and
-                         type in (StartTagToken, CharactersToken, SpaceCharactersToken))):
-                        phase = self.phase
-                    else:
-                        phase = self.phases["inForeignContent"]
-
-                    if type == CharactersToken:
-                        new_token = phase.processCharacters(new_token)
-                    elif type == SpaceCharactersToken:
-                        new_token = phase.processSpaceCharacters(new_token)
-                    elif type == StartTagToken:
-                        new_token = phase.processStartTag(new_token)
-                    elif type == EndTagToken:
-                        new_token = phase.processEndTag(new_token)
-                    elif type == CommentToken:
-                        new_token = phase.processComment(new_token)
-                    elif type == DoctypeToken:
-                        new_token = phase.processDoctype(new_token)
-
-            if (type == StartTagToken and prev_token["selfClosing"] and
-                    not prev_token["selfClosingAcknowledged"]):
-                self.parseError("non-void-element-with-trailing-solidus",
-                                {"name": prev_token["name"]})
-
-        # When the loop finishes it's EOF
-        reprocess = True
-        phases = []
-        while reprocess:
-            phases.append(self.phase)
-            reprocess = self.phase.processEOF()
-            if reprocess:
-                assert self.phase not in phases
-
-    def normalizedTokens(self):
-        for token in self.tokenizer:
-            yield self.normalizeToken(token)
-
-    def parse(self, stream, *args, **kwargs):
-        """Parse a
HTML document into a well-formed tree - - :arg stream: a file-like object or string containing the HTML to be parsed - - The optional encoding parameter must be a string that indicates - the encoding. If specified, that encoding will be used, - regardless of any BOM or later declaration (such as in a meta - element). - - :arg scripting: treat noscript elements as if JavaScript was turned on - - :returns: parsed tree - - Example: - - >>> from html5lib.html5parser import HTMLParser - >>> parser = HTMLParser() - >>> parser.parse('

<html><body><p>This is a doc</p></body></html>

') - - - """ - self._parse(stream, False, None, *args, **kwargs) - return self.tree.getDocument() - - def parseFragment(self, stream, *args, **kwargs): - """Parse a HTML fragment into a well-formed tree fragment - - :arg container: name of the element we're setting the innerHTML - property if set to None, default to 'div' - - :arg stream: a file-like object or string containing the HTML to be parsed - - The optional encoding parameter must be a string that indicates - the encoding. If specified, that encoding will be used, - regardless of any BOM or later declaration (such as in a meta - element) - - :arg scripting: treat noscript elements as if JavaScript was turned on - - :returns: parsed tree - - Example: - - >>> from html5lib.html5libparser import HTMLParser - >>> parser = HTMLParser() - >>> parser.parseFragment('this is a fragment') - - - """ - self._parse(stream, True, *args, **kwargs) - return self.tree.getFragment() - - def parseError(self, errorcode="XXX-undefined-error", datavars=None): - # XXX The idea is to make errorcode mandatory. - if datavars is None: - datavars = {} - self.errors.append((self.tokenizer.stream.position(), errorcode, datavars)) - if self.strict: - raise ParseError(E[errorcode] % datavars) - - def normalizeToken(self, token): - # HTML5 specific normalizations to the token stream - if token["type"] == tokenTypes["StartTag"]: - raw = token["data"] - token["data"] = OrderedDict(raw) - if len(raw) > len(token["data"]): - # we had some duplicated attribute, fix so first wins - token["data"].update(raw[::-1]) - - return token - - def adjustMathMLAttributes(self, token): - adjust_attributes(token, adjustMathMLAttributes) - - def adjustSVGAttributes(self, token): - adjust_attributes(token, adjustSVGAttributes) - - def adjustForeignAttributes(self, token): - adjust_attributes(token, adjustForeignAttributesMap) - - def reparseTokenNormal(self, token): - # pylint:disable=unused-argument - self.parser.phase() - - def resetInsertionMode(self): - # The name of this method is mostly historical. (It's also used in the - # specification.) 
- last = False - newModes = { - "select": "inSelect", - "td": "inCell", - "th": "inCell", - "tr": "inRow", - "tbody": "inTableBody", - "thead": "inTableBody", - "tfoot": "inTableBody", - "caption": "inCaption", - "colgroup": "inColumnGroup", - "table": "inTable", - "head": "inBody", - "body": "inBody", - "frameset": "inFrameset", - "html": "beforeHead" - } - for node in self.tree.openElements[::-1]: - nodeName = node.name - new_phase = None - if node == self.tree.openElements[0]: - assert self.innerHTML - last = True - nodeName = self.innerHTML - # Check for conditions that should only happen in the innerHTML - # case - if nodeName in ("select", "colgroup", "head", "html"): - assert self.innerHTML - - if not last and node.namespace != self.tree.defaultNamespace: - continue - - if nodeName in newModes: - new_phase = self.phases[newModes[nodeName]] - break - elif last: - new_phase = self.phases["inBody"] - break - - self.phase = new_phase - - def parseRCDataRawtext(self, token, contentType): - # Generic RCDATA/RAWTEXT Parsing algorithm - assert contentType in ("RAWTEXT", "RCDATA") - - self.tree.insertElement(token) - - if contentType == "RAWTEXT": - self.tokenizer.state = self.tokenizer.rawtextState - else: - self.tokenizer.state = self.tokenizer.rcdataState - - self.originalPhase = self.phase - - self.phase = self.phases["text"] - - -@_utils.memoize -def getPhases(debug): - def log(function): - """Logger that records which phase processes each token""" - type_names = dict((value, key) for key, value in - tokenTypes.items()) - - def wrapped(self, *args, **kwargs): - if function.__name__.startswith("process") and len(args) > 0: - token = args[0] - try: - info = {"type": type_names[token['type']]} - except: - raise - if token['type'] in tagTokenTypes: - info["name"] = token['name'] - - self.parser.log.append((self.parser.tokenizer.state.__name__, - self.parser.phase.__class__.__name__, - self.__class__.__name__, - function.__name__, - info)) - return function(self, *args, **kwargs) - else: - return function(self, *args, **kwargs) - return wrapped - - def getMetaclass(use_metaclass, metaclass_func): - if use_metaclass: - return method_decorator_metaclass(metaclass_func) - else: - return type - - # pylint:disable=unused-argument - class Phase(with_metaclass(getMetaclass(debug, log))): - """Base class for helper object that implements each phase of processing - """ - - def __init__(self, parser, tree): - self.parser = parser - self.tree = tree - - def processEOF(self): - raise NotImplementedError - - def processComment(self, token): - # For most phases the following is correct. Where it's not it will be - # overridden. - self.tree.insertComment(token, self.tree.openElements[-1]) - - def processDoctype(self, token): - self.parser.parseError("unexpected-doctype") - - def processCharacters(self, token): - self.tree.insertText(token["data"]) - - def processSpaceCharacters(self, token): - self.tree.insertText(token["data"]) - - def processStartTag(self, token): - return self.startTagHandler[token["name"]](token) - - def startTagHtml(self, token): - if not self.parser.firstStartTag and token["name"] == "html": - self.parser.parseError("non-html-root") - # XXX Need a check here to see if the first start tag token emitted is - # this token... If it's not, invoke self.parser.parseError(). 
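An illustrative sketch of how the fragment "container" argument feeds resetInsertionMode (not part of the deleted file; parseFragment is the top-level alias documented earlier):

import html5lib

# In a generic <div> context the stray <td> start tag is ignored and only
# its text survives; in a <tr> context the parser switches to the cell
# insertion mode and keeps the <td> element.
div_fragment = html5lib.parseFragment('<td>cell</td>', container='div')
tr_fragment = html5lib.parseFragment('<td>cell</td>', container='tr')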
- for attr, value in token["data"].items(): - if attr not in self.tree.openElements[0].attributes: - self.tree.openElements[0].attributes[attr] = value - self.parser.firstStartTag = False - - def processEndTag(self, token): - return self.endTagHandler[token["name"]](token) - - class InitialPhase(Phase): - def processSpaceCharacters(self, token): - pass - - def processComment(self, token): - self.tree.insertComment(token, self.tree.document) - - def processDoctype(self, token): - name = token["name"] - publicId = token["publicId"] - systemId = token["systemId"] - correct = token["correct"] - - if (name != "html" or publicId is not None or - systemId is not None and systemId != "about:legacy-compat"): - self.parser.parseError("unknown-doctype") - - if publicId is None: - publicId = "" - - self.tree.insertDoctype(token) - - if publicId != "": - publicId = publicId.translate(asciiUpper2Lower) - - if (not correct or token["name"] != "html" or - publicId.startswith( - ("+//silmaril//dtd html pro v0r11 19970101//", - "-//advasoft ltd//dtd html 3.0 aswedit + extensions//", - "-//as//dtd html 3.0 aswedit + extensions//", - "-//ietf//dtd html 2.0 level 1//", - "-//ietf//dtd html 2.0 level 2//", - "-//ietf//dtd html 2.0 strict level 1//", - "-//ietf//dtd html 2.0 strict level 2//", - "-//ietf//dtd html 2.0 strict//", - "-//ietf//dtd html 2.0//", - "-//ietf//dtd html 2.1e//", - "-//ietf//dtd html 3.0//", - "-//ietf//dtd html 3.2 final//", - "-//ietf//dtd html 3.2//", - "-//ietf//dtd html 3//", - "-//ietf//dtd html level 0//", - "-//ietf//dtd html level 1//", - "-//ietf//dtd html level 2//", - "-//ietf//dtd html level 3//", - "-//ietf//dtd html strict level 0//", - "-//ietf//dtd html strict level 1//", - "-//ietf//dtd html strict level 2//", - "-//ietf//dtd html strict level 3//", - "-//ietf//dtd html strict//", - "-//ietf//dtd html//", - "-//metrius//dtd metrius presentational//", - "-//microsoft//dtd internet explorer 2.0 html strict//", - "-//microsoft//dtd internet explorer 2.0 html//", - "-//microsoft//dtd internet explorer 2.0 tables//", - "-//microsoft//dtd internet explorer 3.0 html strict//", - "-//microsoft//dtd internet explorer 3.0 html//", - "-//microsoft//dtd internet explorer 3.0 tables//", - "-//netscape comm. corp.//dtd html//", - "-//netscape comm. 
corp.//dtd strict html//", - "-//o'reilly and associates//dtd html 2.0//", - "-//o'reilly and associates//dtd html extended 1.0//", - "-//o'reilly and associates//dtd html extended relaxed 1.0//", - "-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//", - "-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//", - "-//spyglass//dtd html 2.0 extended//", - "-//sq//dtd html 2.0 hotmetal + extensions//", - "-//sun microsystems corp.//dtd hotjava html//", - "-//sun microsystems corp.//dtd hotjava strict html//", - "-//w3c//dtd html 3 1995-03-24//", - "-//w3c//dtd html 3.2 draft//", - "-//w3c//dtd html 3.2 final//", - "-//w3c//dtd html 3.2//", - "-//w3c//dtd html 3.2s draft//", - "-//w3c//dtd html 4.0 frameset//", - "-//w3c//dtd html 4.0 transitional//", - "-//w3c//dtd html experimental 19960712//", - "-//w3c//dtd html experimental 970421//", - "-//w3c//dtd w3 html//", - "-//w3o//dtd w3 html 3.0//", - "-//webtechs//dtd mozilla html 2.0//", - "-//webtechs//dtd mozilla html//")) or - publicId in ("-//w3o//dtd w3 html strict 3.0//en//", - "-/w3c/dtd html 4.0 transitional/en", - "html") or - publicId.startswith( - ("-//w3c//dtd html 4.01 frameset//", - "-//w3c//dtd html 4.01 transitional//")) and - systemId is None or - systemId and systemId.lower() == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"): - self.parser.compatMode = "quirks" - elif (publicId.startswith( - ("-//w3c//dtd xhtml 1.0 frameset//", - "-//w3c//dtd xhtml 1.0 transitional//")) or - publicId.startswith( - ("-//w3c//dtd html 4.01 frameset//", - "-//w3c//dtd html 4.01 transitional//")) and - systemId is not None): - self.parser.compatMode = "limited quirks" - - self.parser.phase = self.parser.phases["beforeHtml"] - - def anythingElse(self): - self.parser.compatMode = "quirks" - self.parser.phase = self.parser.phases["beforeHtml"] - - def processCharacters(self, token): - self.parser.parseError("expected-doctype-but-got-chars") - self.anythingElse() - return token - - def processStartTag(self, token): - self.parser.parseError("expected-doctype-but-got-start-tag", - {"name": token["name"]}) - self.anythingElse() - return token - - def processEndTag(self, token): - self.parser.parseError("expected-doctype-but-got-end-tag", - {"name": token["name"]}) - self.anythingElse() - return token - - def processEOF(self): - self.parser.parseError("expected-doctype-but-got-eof") - self.anythingElse() - return True - - class BeforeHtmlPhase(Phase): - # helper methods - def insertHtmlElement(self): - self.tree.insertRoot(impliedTagToken("html", "StartTag")) - self.parser.phase = self.parser.phases["beforeHead"] - - # other - def processEOF(self): - self.insertHtmlElement() - return True - - def processComment(self, token): - self.tree.insertComment(token, self.tree.document) - - def processSpaceCharacters(self, token): - pass - - def processCharacters(self, token): - self.insertHtmlElement() - return token - - def processStartTag(self, token): - if token["name"] == "html": - self.parser.firstStartTag = True - self.insertHtmlElement() - return token - - def processEndTag(self, token): - if token["name"] not in ("head", "body", "html", "br"): - self.parser.parseError("unexpected-end-tag-before-html", - {"name": token["name"]}) - else: - self.insertHtmlElement() - return token - - class BeforeHeadPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("head", self.startTagHead) - ]) 
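A sketch of the quirks-mode selection performed by InitialPhase.processDoctype above (illustrative only; compatMode is the parser attribute initialised in reset()):

import html5lib

parser = html5lib.HTMLParser()

parser.parse('<!DOCTYPE html><p>x</p>')
assert parser.compatMode == 'no quirks'

parser.parse('<p>missing doctype</p>')
assert parser.compatMode == 'quirks'

parser.parse('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" '
             '"http://www.w3.org/TR/html4/loose.dtd"><p>x</p>')
assert parser.compatMode == 'limited quirks'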
- self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - (("head", "body", "html", "br"), self.endTagImplyHead) - ]) - self.endTagHandler.default = self.endTagOther - - def processEOF(self): - self.startTagHead(impliedTagToken("head", "StartTag")) - return True - - def processSpaceCharacters(self, token): - pass - - def processCharacters(self, token): - self.startTagHead(impliedTagToken("head", "StartTag")) - return token - - def startTagHtml(self, token): - return self.parser.phases["inBody"].processStartTag(token) - - def startTagHead(self, token): - self.tree.insertElement(token) - self.tree.headPointer = self.tree.openElements[-1] - self.parser.phase = self.parser.phases["inHead"] - - def startTagOther(self, token): - self.startTagHead(impliedTagToken("head", "StartTag")) - return token - - def endTagImplyHead(self, token): - self.startTagHead(impliedTagToken("head", "StartTag")) - return token - - def endTagOther(self, token): - self.parser.parseError("end-tag-after-implied-root", - {"name": token["name"]}) - - class InHeadPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("title", self.startTagTitle), - (("noframes", "style"), self.startTagNoFramesStyle), - ("noscript", self.startTagNoscript), - ("script", self.startTagScript), - (("base", "basefont", "bgsound", "command", "link"), - self.startTagBaseLinkCommand), - ("meta", self.startTagMeta), - ("head", self.startTagHead) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("head", self.endTagHead), - (("br", "html", "body"), self.endTagHtmlBodyBr) - ]) - self.endTagHandler.default = self.endTagOther - - # the real thing - def processEOF(self): - self.anythingElse() - return True - - def processCharacters(self, token): - self.anythingElse() - return token - - def startTagHtml(self, token): - return self.parser.phases["inBody"].processStartTag(token) - - def startTagHead(self, token): - self.parser.parseError("two-heads-are-not-better-than-one") - - def startTagBaseLinkCommand(self, token): - self.tree.insertElement(token) - self.tree.openElements.pop() - token["selfClosingAcknowledged"] = True - - def startTagMeta(self, token): - self.tree.insertElement(token) - self.tree.openElements.pop() - token["selfClosingAcknowledged"] = True - - attributes = token["data"] - if self.parser.tokenizer.stream.charEncoding[1] == "tentative": - if "charset" in attributes: - self.parser.tokenizer.stream.changeEncoding(attributes["charset"]) - elif ("content" in attributes and - "http-equiv" in attributes and - attributes["http-equiv"].lower() == "content-type"): - # Encoding it as UTF-8 here is a hack, as really we should pass - # the abstract Unicode string, and just use the - # ContentAttrParser on that, but using UTF-8 allows all chars - # to be encoded and as a ASCII-superset works. 
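An illustrative sketch of the meta-charset handling in startTagMeta above; the expected behaviour is assumed from the documentEncoding property shown earlier and may differ slightly between html5lib releases:

import html5lib

parser = html5lib.HTMLParser()
# Byte input is sniffed only tentatively, so a <meta charset> in <head>
# lets the parser settle on (or switch to) the declared encoding.
parser.parse(b'<!DOCTYPE html><html><head><meta charset="euc-jp">'
             b'</head><body><p>x</p></body></html>')
print(parser.documentEncoding)  # expected: 'euc-jp'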
- data = _inputstream.EncodingBytes(attributes["content"].encode("utf-8")) - parser = _inputstream.ContentAttrParser(data) - codec = parser.parse() - self.parser.tokenizer.stream.changeEncoding(codec) - - def startTagTitle(self, token): - self.parser.parseRCDataRawtext(token, "RCDATA") - - def startTagNoFramesStyle(self, token): - # Need to decide whether to implement the scripting-disabled case - self.parser.parseRCDataRawtext(token, "RAWTEXT") - - def startTagNoscript(self, token): - if self.parser.scripting: - self.parser.parseRCDataRawtext(token, "RAWTEXT") - else: - self.tree.insertElement(token) - self.parser.phase = self.parser.phases["inHeadNoscript"] - - def startTagScript(self, token): - self.tree.insertElement(token) - self.parser.tokenizer.state = self.parser.tokenizer.scriptDataState - self.parser.originalPhase = self.parser.phase - self.parser.phase = self.parser.phases["text"] - - def startTagOther(self, token): - self.anythingElse() - return token - - def endTagHead(self, token): - node = self.parser.tree.openElements.pop() - assert node.name == "head", "Expected head got %s" % node.name - self.parser.phase = self.parser.phases["afterHead"] - - def endTagHtmlBodyBr(self, token): - self.anythingElse() - return token - - def endTagOther(self, token): - self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) - - def anythingElse(self): - self.endTagHead(impliedTagToken("head")) - - class InHeadNoscriptPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("basefont", "bgsound", "link", "meta", "noframes", "style"), self.startTagBaseLinkCommand), - (("head", "noscript"), self.startTagHeadNoscript), - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("noscript", self.endTagNoscript), - ("br", self.endTagBr), - ]) - self.endTagHandler.default = self.endTagOther - - def processEOF(self): - self.parser.parseError("eof-in-head-noscript") - self.anythingElse() - return True - - def processComment(self, token): - return self.parser.phases["inHead"].processComment(token) - - def processCharacters(self, token): - self.parser.parseError("char-in-head-noscript") - self.anythingElse() - return token - - def processSpaceCharacters(self, token): - return self.parser.phases["inHead"].processSpaceCharacters(token) - - def startTagHtml(self, token): - return self.parser.phases["inBody"].processStartTag(token) - - def startTagBaseLinkCommand(self, token): - return self.parser.phases["inHead"].processStartTag(token) - - def startTagHeadNoscript(self, token): - self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) - - def startTagOther(self, token): - self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) - self.anythingElse() - return token - - def endTagNoscript(self, token): - node = self.parser.tree.openElements.pop() - assert node.name == "noscript", "Expected noscript got %s" % node.name - self.parser.phase = self.parser.phases["inHead"] - - def endTagBr(self, token): - self.parser.parseError("unexpected-inhead-noscript-tag", {"name": token["name"]}) - self.anythingElse() - return token - - def endTagOther(self, token): - self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) - - def anythingElse(self): - # Caller must raise parse error first! 
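A sketch of the effect of the scripting flag on <noscript> handling described above (illustrative only; scripting is the keyword argument forwarded to _parse):

import html5lib

# With scripting enabled, <noscript> content in <head> is parsed as raw
# text; with it disabled, the parser recovers and builds real elements.
with_scripting = html5lib.parse('<noscript><p>fallback</p></noscript>',
                                scripting=True)
without_scripting = html5lib.parse('<noscript><p>fallback</p></noscript>',
                                   scripting=False)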
- self.endTagNoscript(impliedTagToken("noscript")) - - class AfterHeadPhase(Phase): - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - ("body", self.startTagBody), - ("frameset", self.startTagFrameset), - (("base", "basefont", "bgsound", "link", "meta", "noframes", "script", - "style", "title"), - self.startTagFromHead), - ("head", self.startTagHead) - ]) - self.startTagHandler.default = self.startTagOther - self.endTagHandler = _utils.MethodDispatcher([(("body", "html", "br"), - self.endTagHtmlBodyBr)]) - self.endTagHandler.default = self.endTagOther - - def processEOF(self): - self.anythingElse() - return True - - def processCharacters(self, token): - self.anythingElse() - return token - - def startTagHtml(self, token): - return self.parser.phases["inBody"].processStartTag(token) - - def startTagBody(self, token): - self.parser.framesetOK = False - self.tree.insertElement(token) - self.parser.phase = self.parser.phases["inBody"] - - def startTagFrameset(self, token): - self.tree.insertElement(token) - self.parser.phase = self.parser.phases["inFrameset"] - - def startTagFromHead(self, token): - self.parser.parseError("unexpected-start-tag-out-of-my-head", - {"name": token["name"]}) - self.tree.openElements.append(self.tree.headPointer) - self.parser.phases["inHead"].processStartTag(token) - for node in self.tree.openElements[::-1]: - if node.name == "head": - self.tree.openElements.remove(node) - break - - def startTagHead(self, token): - self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) - - def startTagOther(self, token): - self.anythingElse() - return token - - def endTagHtmlBodyBr(self, token): - self.anythingElse() - return token - - def endTagOther(self, token): - self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) - - def anythingElse(self): - self.tree.insertElement(impliedTagToken("body", "StartTag")) - self.parser.phase = self.parser.phases["inBody"] - self.parser.framesetOK = True - - class InBodyPhase(Phase): - # http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody - # the really-really-really-very crazy mode - def __init__(self, parser, tree): - Phase.__init__(self, parser, tree) - - # Set this to the default handler - self.processSpaceCharacters = self.processSpaceCharactersNonPre - - self.startTagHandler = _utils.MethodDispatcher([ - ("html", self.startTagHtml), - (("base", "basefont", "bgsound", "command", "link", "meta", - "script", "style", "title"), - self.startTagProcessInHead), - ("body", self.startTagBody), - ("frameset", self.startTagFrameset), - (("address", "article", "aside", "blockquote", "center", "details", - "dir", "div", "dl", "fieldset", "figcaption", "figure", - "footer", "header", "hgroup", "main", "menu", "nav", "ol", "p", - "section", "summary", "ul"), - self.startTagCloseP), - (headingElements, self.startTagHeading), - (("pre", "listing"), self.startTagPreListing), - ("form", self.startTagForm), - (("li", "dd", "dt"), self.startTagListItem), - ("plaintext", self.startTagPlaintext), - ("a", self.startTagA), - (("b", "big", "code", "em", "font", "i", "s", "small", "strike", - "strong", "tt", "u"), self.startTagFormatting), - ("nobr", self.startTagNobr), - ("button", self.startTagButton), - (("applet", "marquee", "object"), self.startTagAppletMarqueeObject), - ("xmp", self.startTagXmp), - ("table", self.startTagTable), - (("area", "br", "embed", "img", "keygen", "wbr"), - 
self.startTagVoidFormatting), - (("param", "source", "track"), self.startTagParamSource), - ("input", self.startTagInput), - ("hr", self.startTagHr), - ("image", self.startTagImage), - ("isindex", self.startTagIsIndex), - ("textarea", self.startTagTextarea), - ("iframe", self.startTagIFrame), - ("noscript", self.startTagNoscript), - (("noembed", "noframes"), self.startTagRawtext), - ("select", self.startTagSelect), - (("rp", "rt"), self.startTagRpRt), - (("option", "optgroup"), self.startTagOpt), - (("math"), self.startTagMath), - (("svg"), self.startTagSvg), - (("caption", "col", "colgroup", "frame", "head", - "tbody", "td", "tfoot", "th", "thead", - "tr"), self.startTagMisplaced) - ]) - self.startTagHandler.default = self.startTagOther - - self.endTagHandler = _utils.MethodDispatcher([ - ("body", self.endTagBody), - ("html", self.endTagHtml), - (("address", "article", "aside", "blockquote", "button", "center", - "details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure", - "footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre", - "section", "summary", "ul"), self.endTagBlock), - ("form", self.endTagForm), - ("p", self.endTagP), - (("dd", "dt", "li"), self.endTagListItem), - (headingElements, self.endTagHeading), - (("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small", - "strike", "strong", "tt", "u"), self.endTagFormatting), - (("applet", "marquee", "object"), self.endTagAppletMarqueeObject), - ("br", self.endTagBr), - ]) - self.endTagHandler.default = self.endTagOther - - def isMatchingFormattingElement(self, node1, node2): - return (node1.name == node2.name and - node1.namespace == node2.namespace and - node1.attributes == node2.attributes) - - # helper - def addFormattingElement(self, token): - self.tree.insertElement(token) - element = self.tree.openElements[-1] - - matchingElements = [] - for node in self.tree.activeFormattingElements[::-1]: - if node is Marker: - break - elif self.isMatchingFormattingElement(node, element): - matchingElements.append(node) - - assert len(matchingElements) <= 3 - if len(matchingElements) == 3: - self.tree.activeFormattingElements.remove(matchingElements[-1]) - self.tree.activeFormattingElements.append(element) - - # the real deal - def processEOF(self): - allowed_elements = frozenset(("dd", "dt", "li", "p", "tbody", "td", - "tfoot", "th", "thead", "tr", "body", - "html")) - for node in self.tree.openElements[::-1]: - if node.name not in allowed_elements: - self.parser.parseError("expected-closing-tag-but-got-eof") - break - # Stop parsing - - def processSpaceCharactersDropNewline(self, token): - # Sometimes (start of
<pre>, <listing>, and <textarea>